From b84b5525e1e615992e706c39b4d2d2d11e09bbb6 Mon Sep 17 00:00:00 2001 From: Peter Dyson Date: Tue, 21 May 2019 09:43:01 +1000 Subject: [PATCH 001/210] [DOCS] path_hierarchy tokenizer examples (#39630) Closes #17138 --- docs/reference/analysis/tokenizers.asciidoc | 4 + .../pathhierarchy-tokenizer-examples.asciidoc | 193 ++++++++++++++++++ .../pathhierarchy-tokenizer.asciidoc | 4 + 3 files changed, 201 insertions(+) create mode 100644 docs/reference/analysis/tokenizers/pathhierarchy-tokenizer-examples.asciidoc diff --git a/docs/reference/analysis/tokenizers.asciidoc b/docs/reference/analysis/tokenizers.asciidoc index d6f15ded05f..628afebfdcb 100644 --- a/docs/reference/analysis/tokenizers.asciidoc +++ b/docs/reference/analysis/tokenizers.asciidoc @@ -155,3 +155,7 @@ include::tokenizers/simplepattern-tokenizer.asciidoc[] include::tokenizers/simplepatternsplit-tokenizer.asciidoc[] include::tokenizers/pathhierarchy-tokenizer.asciidoc[] + +include::tokenizers/pathhierarchy-tokenizer-examples.asciidoc[] + + diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer-examples.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer-examples.asciidoc new file mode 100644 index 00000000000..c93d9188221 --- /dev/null +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer-examples.asciidoc @@ -0,0 +1,193 @@ +[[analysis-pathhierarchy-tokenizer-examples]] +=== Path Hierarchy Tokenizer Examples + +A common use-case for the `path_hierarchy` tokenizer is filtering results by +file paths. If indexing a file path along with the data, the use of the +`path_hierarchy` tokenizer to analyze the path allows filtering the results +by different parts of the file path string. + + +This example configures an index to have two custom analyzers and applies +those analyzers to multifields of the `file_path` text field that will +store filenames. One of the two analyzers uses reverse tokenization. +Some sample documents are then indexed to represent some file paths +for photos inside photo folders of two different users. 
+ + +[source,js] +-------------------------------------------------- +PUT file-path-test +{ + "settings": { + "analysis": { + "analyzer": { + "custom_path_tree": { + "tokenizer": "custom_hierarchy" + }, + "custom_path_tree_reversed": { + "tokenizer": "custom_hierarchy_reversed" + } + }, + "tokenizer": { + "custom_hierarchy": { + "type": "path_hierarchy", + "delimiter": "/" + }, + "custom_hierarchy_reversed": { + "type": "path_hierarchy", + "delimiter": "/", + "reverse": "true" + } + } + } + }, + "mappings": { + "_doc": { + "properties": { + "file_path": { + "type": "text", + "fields": { + "tree": { + "type": "text", + "analyzer": "custom_path_tree" + }, + "tree_reversed": { + "type": "text", + "analyzer": "custom_path_tree_reversed" + } + } + } + } + } + } +} + +POST file-path-test/_doc/1 +{ + "file_path": "/User/alice/photos/2017/05/16/my_photo1.jpg" +} + +POST file-path-test/_doc/2 +{ + "file_path": "/User/alice/photos/2017/05/16/my_photo2.jpg" +} + +POST file-path-test/_doc/3 +{ + "file_path": "/User/alice/photos/2017/05/16/my_photo3.jpg" +} + +POST file-path-test/_doc/4 +{ + "file_path": "/User/alice/photos/2017/05/15/my_photo1.jpg" +} + +POST file-path-test/_doc/5 +{ + "file_path": "/User/bob/photos/2017/05/16/my_photo1.jpg" +} +-------------------------------------------------- +// CONSOLE +// TESTSETUP + + +A search for a particular file path string against the text field matches all +the example documents, with Bob's documents ranking highest due to `bob` also +being one of the terms created by the standard analyzer boosting relevance for +Bob's documents. + +[source,js] +-------------------------------------------------- +GET file-path-test/_search +{ + "query": { + "match": { + "file_path": "/User/bob/photos/2017/05" + } + } +} +-------------------------------------------------- +// CONSOLE + + +It's simple to match or filter documents with file paths that exist within a +particular directory using the `file_path.tree` field. + +[source,js] +-------------------------------------------------- +GET file-path-test/_search +{ + "query": { + "term": { + "file_path.tree": "/User/alice/photos/2017/05/16" + } + } +} +-------------------------------------------------- +// CONSOLE + +With the reverse parameter for this tokenizer, it's also possible to match +from the other end of the file path, such as individual file names or a deep +level subdirectory. The following example shows a search for all files named +`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field +configured to use the reverse parameter in the mapping. + + +[source,js] +-------------------------------------------------- +GET file-path-test/_search +{ + "query": { + "term": { + "file_path.tree_reversed": { + "value": "my_photo1.jpg" + } + } + } +} +-------------------------------------------------- +// CONSOLE + + +Viewing the tokens generated with both forward and reverse is instructive +in showing the tokens created for the same file path value. 
+ + +[source,js] +-------------------------------------------------- +POST file-path-test/_analyze +{ + "analyzer": "custom_path_tree", + "text": "/User/alice/photos/2017/05/16/my_photo1.jpg" +} + +POST file-path-test/_analyze +{ + "analyzer": "custom_path_tree_reversed", + "text": "/User/alice/photos/2017/05/16/my_photo1.jpg" +} +-------------------------------------------------- +// CONSOLE + + +It's also useful to be able to filter with file paths when combined with other +types of searches, such as this example looking for any files paths with `16` +that also must be in Alice's photo directory. + +[source,js] +-------------------------------------------------- +GET file-path-test/_search +{ + "query": { + "bool" : { + "must" : { + "match" : { "file_path" : "16" } + }, + "filter": { + "term" : { "file_path.tree" : "/User/alice" } + } + } + } +} +-------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index 55aa7d66da3..8d425197a2a 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -170,3 +170,7 @@ If we were to set `reverse` to `true`, it would produce the following: --------------------------- [ one/two/three/, two/three/, three/ ] --------------------------- + +[float] +=== Detailed Examples +See <>. From 5a76f46ac6850e18d5cdcf859096859a07518179 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 30 May 2019 10:16:58 -0400 Subject: [PATCH 002/210] Fix error with mapping in docs Related to #39630 --- .../pathhierarchy-tokenizer-examples.asciidoc | 24 +++++++++---------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer-examples.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer-examples.asciidoc index c93d9188221..ee02d66e403 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer-examples.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer-examples.asciidoc @@ -42,19 +42,17 @@ PUT file-path-test } }, "mappings": { - "_doc": { - "properties": { - "file_path": { - "type": "text", - "fields": { - "tree": { - "type": "text", - "analyzer": "custom_path_tree" - }, - "tree_reversed": { - "type": "text", - "analyzer": "custom_path_tree_reversed" - } + "properties": { + "file_path": { + "type": "text", + "fields": { + "tree": { + "type": "text", + "analyzer": "custom_path_tree" + }, + "tree_reversed": { + "type": "text", + "analyzer": "custom_path_tree_reversed" } } } From 7cabe8acc9214634ea9a41d6d2f42064f0119bd6 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 30 May 2019 17:33:06 +0300 Subject: [PATCH 003/210] Fix refresh remote JWKS logic (#42662) This change ensures that: - We only attempt to refresh the remote JWKS when there is a signature related error only ( BadJWSException instead of the geric BadJOSEException ) - We do call OpenIDConnectAuthenticator#getUserClaims upon successful refresh. - We test this in OpenIdConnectAuthenticatorTests. Without this fix, when using the OpenID Connect realm with a remote JWKSet configured in `op.jwks_path`, the refresh would be triggered for most configuration errors ( i.e. 
wrong value for `op.issuer` ) and the kibana wouldn't get a response and timeout since `getUserClaims` wouldn't be called because `ReloadableJWKSource#reloadAsync` wouldn't call `onResponse` on the future. --- .../authc/oidc/OpenIdConnectAuthenticator.java | 6 ++++-- .../oidc/OpenIdConnectAuthenticatorTests.java | 15 +++++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java index c652a39b909..6de933804f3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java @@ -12,6 +12,7 @@ import com.nimbusds.jose.jwk.JWKSelector; import com.nimbusds.jose.jwk.JWKSet; import com.nimbusds.jose.jwk.source.JWKSource; import com.nimbusds.jose.proc.BadJOSEException; +import com.nimbusds.jose.proc.BadJWSException; import com.nimbusds.jose.proc.JWSVerificationKeySelector; import com.nimbusds.jose.proc.SecurityContext; import com.nimbusds.jose.util.IOUtils; @@ -240,7 +241,7 @@ public class OpenIdConnectAuthenticator { } claimsListener.onResponse(enrichedVerifiedIdTokenClaims); } - } catch (BadJOSEException e) { + } catch (BadJWSException e) { // We only try to update the cached JWK set once if a remote source is used and // RSA or ECDSA is used for signatures if (shouldRetry @@ -256,7 +257,7 @@ public class OpenIdConnectAuthenticator { } else { claimsListener.onFailure(new ElasticsearchSecurityException("Failed to parse or validate the ID Token", e)); } - } catch (com.nimbusds.oauth2.sdk.ParseException | ParseException | JOSEException e) { + } catch (com.nimbusds.oauth2.sdk.ParseException | ParseException | BadJOSEException | JOSEException e) { claimsListener.onFailure(new ElasticsearchSecurityException("Failed to parse or validate the ID Token", e)); } } @@ -777,6 +778,7 @@ public class OpenIdConnectAuthenticator { StandardCharsets.UTF_8)); reloadFutureRef.set(null); LOGGER.trace("Successfully refreshed and cached remote JWKSet"); + future.onResponse(null); } catch (IOException | ParseException e) { failed(e); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java index 43b58b8d4b5..7a2fa9af039 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java @@ -88,6 +88,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { private Settings globalSettings; private Environment env; private ThreadContext threadContext; + private int callsToReloadJwk; @Before public void setup() { @@ -95,6 +96,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { .put("xpack.security.authc.realms.oidc.oidc-realm.ssl.verification_mode", "certificate").build(); env = TestEnvironment.newEnvironment(globalSettings); threadContext = new ThreadContext(globalSettings); + callsToReloadJwk = 0; } @After @@ -278,6 +280,7 @@ public class OpenIdConnectAuthenticatorTests 
extends OpenIdConnectTestCase { authenticator.authenticate(token, future); JWTClaimsSet claimsSet = future.actionGet(); assertThat(claimsSet.getSubject(), equalTo(subject)); + assertThat(callsToReloadJwk, equalTo(0)); } public void testImplicitFlowFailsWithExpiredToken() throws Exception { @@ -317,6 +320,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJWTException.class)); assertThat(e.getCause().getMessage(), containsString("Expired JWT")); + assertThat(callsToReloadJwk, equalTo(0)); } public void testImplicitFlowFailsNotYetIssuedToken() throws Exception { @@ -356,6 +360,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJWTException.class)); assertThat(e.getCause().getMessage(), containsString("JWT issue time ahead of current time")); + assertThat(callsToReloadJwk, equalTo(0)); } public void testImplicitFlowFailsInvalidIssuer() throws Exception { @@ -394,6 +399,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJWTException.class)); assertThat(e.getCause().getMessage(), containsString("Unexpected JWT issuer")); + assertThat(callsToReloadJwk, equalTo(0)); } public void testImplicitFlowFailsInvalidAudience() throws Exception { @@ -432,6 +438,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJWTException.class)); assertThat(e.getCause().getMessage(), containsString("Unexpected JWT audience")); + assertThat(callsToReloadJwk, equalTo(0)); } public void testAuthenticateImplicitFlowFailsWithForgedRsaIdToken() throws Exception { @@ -456,6 +463,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJWSException.class)); assertThat(e.getCause().getMessage(), containsString("Signed JWT rejected: Invalid signature")); + assertThat(callsToReloadJwk, equalTo(1)); } public void testAuthenticateImplicitFlowFailsWithForgedEcsdsaIdToken() throws Exception { @@ -480,6 +488,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJWSException.class)); assertThat(e.getCause().getMessage(), containsString("Signed JWT rejected: Invalid signature")); + assertThat(callsToReloadJwk, equalTo(1)); } public void testAuthenticateImplicitFlowFailsWithForgedHmacIdToken() throws Exception { @@ -503,6 +512,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJWSException.class)); assertThat(e.getCause().getMessage(), containsString("Signed JWT rejected: Invalid signature")); + assertThat(callsToReloadJwk, equalTo(0)); } public void testAuthenticateImplicitFlowFailsWithForgedAccessToken() throws Exception { @@ -532,6 +542,7 @@ public class 
OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to verify access token")); assertThat(e.getCause(), instanceOf(InvalidHashException.class)); assertThat(e.getCause().getMessage(), containsString("Access token hash (at_hash) mismatch")); + assertThat(callsToReloadJwk, equalTo(0)); } public void testImplicitFlowFailsWithNoneAlgorithm() throws Exception { @@ -569,6 +580,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJOSEException.class)); assertThat(e.getCause().getMessage(), containsString("Another algorithm expected, or no matching key(s) found")); + assertThat(callsToReloadJwk, equalTo(0)); } /** @@ -599,6 +611,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJOSEException.class)); assertThat(e.getCause().getMessage(), containsString("Another algorithm expected, or no matching key(s) found")); + assertThat(callsToReloadJwk, equalTo(0)); } public void testImplicitFlowFailsWithUnsignedJwt() throws Exception { @@ -635,6 +648,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { assertThat(e.getMessage(), containsString("Failed to parse or validate the ID Token")); assertThat(e.getCause(), instanceOf(BadJWTException.class)); assertThat(e.getCause().getMessage(), containsString("Signed ID token expected")); + assertThat(callsToReloadJwk, equalTo(0)); } public void testJsonObjectMerging() throws Exception { @@ -832,6 +846,7 @@ public class OpenIdConnectAuthenticatorTests extends OpenIdConnectTestCase { Mockito.doAnswer(invocation -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[0]; + callsToReloadJwk += 1; listener.onResponse(null); return null; }).when(jwkSource).triggerReload(any(ActionListener.class)); From 2a0c30c100034fcbcd68c495b26ce40643c3ef0d Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 24 May 2019 19:28:22 -0700 Subject: [PATCH 004/210] Make JarHell task cacheable (#42551) --- .../gradle/precommit/PrecommitTasks.groovy | 12 ++++----- .../gradle/precommit/JarHellTask.java | 25 +++++++------------ 2 files changed, 14 insertions(+), 23 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 0ef39e6e130..25218202bfc 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -107,14 +107,12 @@ class PrecommitTasks { } private static Task configureJarHell(Project project) { - Task task = project.tasks.create('jarHell', JarHellTask.class) - task.classpath = project.sourceSets.test.runtimeClasspath - if (project.plugins.hasPlugin(ShadowPlugin)) { - task.classpath += project.configurations.bundle + return project.tasks.create('jarHell', JarHellTask) { task -> + task.classpath = project.sourceSets.test.runtimeClasspath + if (project.plugins.hasPlugin(ShadowPlugin)) { + task.classpath += project.configurations.bundle + } } - task.dependsOn(project.sourceSets.test.classesTaskName) - task.javaHome = project.runtimeJavaHome - return task } private static Task 
configureThirdPartyAudit(Project project) { diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/JarHellTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/JarHellTask.java index fd5b0c57907..c9152486a1c 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/JarHellTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/JarHellTask.java @@ -21,19 +21,20 @@ package org.elasticsearch.gradle.precommit; import org.elasticsearch.gradle.LoggedExec; import org.gradle.api.file.FileCollection; -import org.gradle.api.tasks.Classpath; -import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.CacheableTask; +import org.gradle.api.tasks.CompileClasspath; import org.gradle.api.tasks.TaskAction; +import java.io.File; + /** * Runs CheckJarHell on a classpath. */ +@CacheableTask public class JarHellTask extends PrecommitTask { private FileCollection classpath; - private Object javaHome; - public JarHellTask() { setDescription("Runs CheckJarHell on the configured classpath"); } @@ -42,23 +43,15 @@ public class JarHellTask extends PrecommitTask { public void runJarHellCheck() { LoggedExec.javaexec(getProject(), spec -> { spec.classpath(getClasspath()); - spec.executable(getJavaHome() + "/bin/java"); spec.setMain("org.elasticsearch.bootstrap.JarHell"); }); } - @Input - public Object getJavaHome() { - return javaHome; - } - - public void setJavaHome(Object javaHome) { - this.javaHome = javaHome; - } - - @Classpath + // We use compile classpath normalization here because class implementation changes are irrelevant for the purposes of jar hell. + // We only care about the runtime classpath ABI here. + @CompileClasspath public FileCollection getClasspath() { - return classpath.filter(file -> file.exists()); + return classpath.filter(File::exists); } public void setClasspath(FileCollection classpath) { From bdd28cf4bbc76a0966f1319ef630a21f3ee36b3b Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 30 May 2019 08:37:58 -0700 Subject: [PATCH 005/210] Remove unused Gradle plugin (#42684) --- .../gradle/test/MessyTestPlugin.groovy | 63 ------------------- .../elasticsearch.messy-test.properties | 20 ------ 2 files changed, 83 deletions(-) delete mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy delete mode 100644 buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.messy-test.properties diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy deleted file mode 100644 index 1c0aec1bc00..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle.test - -import org.elasticsearch.gradle.plugin.PluginBuildPlugin -import org.gradle.api.Project -import org.gradle.api.artifacts.Dependency -import org.gradle.api.artifacts.ProjectDependency -import org.gradle.api.tasks.Copy - -/** - * A plugin to run messy tests, which are generally tests that depend on plugins. - * - * This plugin will add the same test configuration as standalone tests, except - * also add the plugin-metadata and properties files for each plugin project - * dependency. - */ -class MessyTestPlugin extends StandaloneTestPlugin { - @Override - public void apply(Project project) { - super.apply(project) - - project.configurations.testCompile.dependencies.all { Dependency dep -> - // this closure is run every time a compile dependency is added - if (dep instanceof ProjectDependency && dep.dependencyProject.plugins.hasPlugin(PluginBuildPlugin)) { - project.gradle.projectsEvaluated { - addPluginResources(project, dep.dependencyProject) - } - } - } - } - - private static addPluginResources(Project project, Project pluginProject) { - String outputDir = "${project.buildDir}/generated-resources/${pluginProject.name}" - String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata") - Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class) - copyPluginMetadata.into(outputDir) - copyPluginMetadata.from(pluginProject.tasks.pluginProperties) - copyPluginMetadata.from(pluginProject.file('src/main/plugin-metadata')) - project.sourceSets.test.output.dir(outputDir, builtBy: taskName) - - // add each generated dir to the test classpath in IDEs - project.idea.module.singleEntryLibraries= ['TEST': [project.file(outputDir)]] - // Eclipse doesn't need this because it gets the entire module as a dependency - } -} diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.messy-test.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.messy-test.properties deleted file mode 100644 index 507a0f85a04..00000000000 --- a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.messy-test.properties +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -implementation-class=org.elasticsearch.gradle.test.MessyTestPlugin From ac8a9515a3c8bfb4ad769bc410ce1ebebd286002 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 30 May 2019 08:38:17 -0700 Subject: [PATCH 006/210] Remove usage of deprecated compare gradle builds plugin (#42687) --- build.gradle | 25 ------------------- .../elasticsearch/gradle/BuildPlugin.groovy | 4 --- 2 files changed, 29 deletions(-) diff --git a/build.gradle b/build.gradle index 9b4aa3ed9c7..93659101d84 100644 --- a/build.gradle +++ b/build.gradle @@ -552,31 +552,6 @@ gradle.projectsEvaluated { } } -if (System.properties.get("build.compare") != null) { - apply plugin: 'compare-gradle-builds' - compareGradleBuilds { - ext.referenceProject = System.properties.get("build.compare") - doFirst { - if (file(referenceProject).exists() == false) { - throw new GradleException( - "Use git worktree to check out a version to compare against to ../elasticsearch_build_reference" - ) - } - } - sourceBuild { - gradleVersion = gradle.getGradleVersion() - projectDir = referenceProject - tasks = ["clean", "assemble"] - arguments = ["-Dbuild.compare_friendly=true"] - } - targetBuild { - tasks = ["clean", "assemble"] - // use -Dorg.gradle.java.home= to alter jdk versions - arguments = ["-Dbuild.compare_friendly=true"] - } - } -} - allprojects { task resolveAllDependencies { dependsOn tasks.matching { it.name == "pullFixture"} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index d86cfdae2db..1f713e9f1be 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -922,10 +922,6 @@ class BuildPlugin implements Plugin { jarTask.manifest.attributes('Change': shortHash) } } - // Force manifest entries that change by nature to a constant to be able to compare builds more effectively - if (System.properties.getProperty("build.compare_friendly", "false") == "true") { - jarTask.manifest.getAttributes().clear() - } } // add license/notice files project.afterEvaluate { From ce30afcd01143946430bafcb568a65897101cb86 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Thu, 30 May 2019 18:04:47 +0200 Subject: [PATCH 007/210] Deprecate CommonTermsQuery and cutoff_frequency (#42619) (#42691) Since the max_score optimization landed in Elasticsearch 7, the CommonTermsQuery is redundant and slower. Moreover the cutoff_frequency parameter for MatchQuery and MultiMatchQuery is redundant. 
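
For illustration only (this example is not part of the original change, and the
`message` field name is hypothetical), a search that previously used the
deprecated `common` query, for instance

    GET /_search
    {
      "query": {
        "common": {
          "message": {
            "query": "the quick brown fox",
            "cutoff_frequency": 0.001
          }
        }
      }
    }

can normally be rewritten as a plain `match` query and rely on the engine
skipping non-competitive blocks of documents when the total number of hits is
not tracked:

    GET /_search
    {
      "query": {
        "match": {
          "message": "the quick brown fox"
        }
      }
    }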
Relates to #27096 (cherry picked from commit 04b74497314eeec076753a33b3b6cc11549646e8) --- .../query-dsl/common-terms-query.asciidoc | 7 ++++ docs/reference/query-dsl/match-query.asciidoc | 3 ++ .../search.query/50_queries_with_synonyms.yml | 27 +++++++++++++ .../lucene/queries/BlendedTermQuery.java | 5 +++ .../queries/ExtendedCommonTermsQuery.java | 4 ++ .../index/query/CommonTermsQueryBuilder.java | 11 +++++ .../index/query/MatchQueryBuilder.java | 15 ++++++- .../index/query/MultiMatchQueryBuilder.java | 15 ++++++- .../index/query/QueryBuilders.java | 3 ++ .../index/search/MatchQuery.java | 4 ++ .../elasticsearch/search/SearchModule.java | 7 +++- .../query/CommonTermsQueryBuilderTests.java | 40 +++++++++++++++++++ .../query/CommonTermsQueryParserTests.java | 4 +- .../index/query/MatchQueryBuilderTests.java | 6 +-- .../query/MultiMatchQueryBuilderTests.java | 3 -- .../search/SearchModuleTests.java | 6 +-- .../profile/query/RandomQueryGenerator.java | 33 +-------------- .../test/AbstractQueryTestCase.java | 2 +- 18 files changed, 145 insertions(+), 50 deletions(-) diff --git a/docs/reference/query-dsl/common-terms-query.asciidoc b/docs/reference/query-dsl/common-terms-query.asciidoc index 87288778246..f2d784eb0c4 100644 --- a/docs/reference/query-dsl/common-terms-query.asciidoc +++ b/docs/reference/query-dsl/common-terms-query.asciidoc @@ -1,6 +1,8 @@ [[query-dsl-common-terms-query]] === Common Terms Query +deprecated[7.3.0,"Use <> instead, which skips blocks of documents efficiently, without any configuration, provided that the total number of hits is not tracked."] + The `common` terms query is a modern alternative to stopwords which improves the precision and recall of search results (by taking stopwords into account), without sacrificing performance. 
@@ -83,6 +85,7 @@ GET /_search } -------------------------------------------------- // CONSOLE +// TEST[warning:Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]] The number of terms which should match can be controlled with the <> @@ -108,6 +111,7 @@ GET /_search } -------------------------------------------------- // CONSOLE +// TEST[warning:Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]] which is roughly equivalent to: @@ -154,6 +158,7 @@ GET /_search } -------------------------------------------------- // CONSOLE +// TEST[warning:Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]] which is roughly equivalent to: @@ -209,6 +214,7 @@ GET /_search } -------------------------------------------------- // CONSOLE +// TEST[warning:Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]] which is roughly equivalent to: @@ -270,6 +276,7 @@ GET /_search } -------------------------------------------------- // CONSOLE +// TEST[warning:Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]] which is roughly equivalent to: diff --git a/docs/reference/query-dsl/match-query.asciidoc b/docs/reference/query-dsl/match-query.asciidoc index 1f8bf6892ab..5e45d2b3212 100644 --- a/docs/reference/query-dsl/match-query.asciidoc +++ b/docs/reference/query-dsl/match-query.asciidoc @@ -103,6 +103,8 @@ GET /_search [[query-dsl-match-query-cutoff]] ===== Cutoff frequency +deprecated[7.3.0,"This option can be omitted as the <> can skip block of documents efficiently, without any configuration, provided that the total number of hits is not tracked."] + The match query supports a `cutoff_frequency` that allows specifying an absolute or relative document frequency where high frequency terms are moved into an optional subquery and are only scored @@ -139,6 +141,7 @@ GET /_search } -------------------------------------------------- // CONSOLE +// TEST[warning:Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]] IMPORTANT: The `cutoff_frequency` option operates on a per-shard-level. 
This means that when trying it out on test indexes with low document numbers you diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml index 784ffd9dd12..ce9cc749557 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml @@ -1,5 +1,8 @@ --- "Test common terms query with stacked tokens": + - skip: + features: "warnings" + - do: indices.create: index: test @@ -47,6 +50,8 @@ refresh: true - do: + warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -62,6 +67,8 @@ - match: { hits.hits.2._id: "3" } - do: + warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -76,6 +83,8 @@ - match: { hits.hits.1._id: "2" } - do: + warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -90,6 +99,8 @@ - match: { hits.hits.2._id: "3" } - do: + warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -103,6 +114,8 @@ - match: { hits.hits.0._id: "2" } - do: + warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -118,6 +131,8 @@ - match: { hits.hits.1._id: "1" } - do: + warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -132,6 +147,8 @@ - match: { hits.hits.0._id: "2" } - do: + warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -144,6 +161,8 @@ - match: { hits.hits.0._id: "2" } - do: + warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -158,6 +177,8 @@ - match: { hits.hits.2._id: "3" } - do: + warnings: + - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -172,6 +193,8 @@ - match: { hits.hits.1._id: "2" } - do: + warnings: + - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -187,6 +210,8 @@ - match: { hits.hits.2._id: "3" } - do: + warnings: + - 'Deprecated field 
[cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: @@ -201,6 +226,8 @@ - match: { hits.hits.1._id: "2" } - do: + warnings: + - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [multi_match] query can skip block of documents efficiently if the total number of hits is not tracked]' search: rest_total_hits_as_int: true body: diff --git a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index c696d476bbb..f823f3a1426 100644 --- a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -278,6 +278,11 @@ public abstract class BlendedTermQuery extends Query { return Objects.hash(classHash(), Arrays.hashCode(equalsTerms())); } + /** + * @deprecated Since max_score optimization landed in 7.0, normal MultiMatchQuery + * will achieve the same result without any configuration. + */ + @Deprecated public static BlendedTermQuery commonTermsBlendedQuery(Term[] terms, final float[] boosts, final float maxTermFrequency) { return new BlendedTermQuery(terms, boosts) { @Override diff --git a/server/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java b/server/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java index 249b7fa83b5..2d70ed8b90a 100644 --- a/server/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java @@ -26,7 +26,11 @@ import org.elasticsearch.common.lucene.search.Queries; * Extended version of {@link CommonTermsQuery} that allows to pass in a * {@code minimumNumberShouldMatch} specification that uses the actual num of high frequent terms * to calculate the minimum matching terms. + * + * @deprecated Since max_optimization optimization landed in 7.0, normal MatchQuery + * will achieve the same result without any configuration. */ +@Deprecated public class ExtendedCommonTermsQuery extends CommonTermsQuery { public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency) { diff --git a/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java index 2fd4d710f96..3c2f80a25f7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java @@ -48,9 +48,16 @@ import java.util.Objects; * and high-frequency terms are added to an optional boolean clause. The * optional clause is only executed if the required "low-frequency' clause * matches. + * + * @deprecated Since max_optimization optimization landed in 7.0, normal MatchQuery + * will achieve the same result without any configuration. 
*/ +@Deprecated public class CommonTermsQueryBuilder extends AbstractQueryBuilder { + public static final String COMMON_TERMS_QUERY_DEPRECATION_MSG = "[match] query which can efficiently " + + "skip blocks of documents if the total number of hits is not tracked"; + public static final String NAME = "common"; public static final float DEFAULT_CUTOFF_FREQ = 0.01f; @@ -87,7 +94,9 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder { + + private static final String CUTOFF_FREQUENCY_DEPRECATION_MSG = "you can omit this option, " + + "the [match] query can skip block of documents efficiently if the total number of hits is not tracked"; + public static final ParseField ZERO_TERMS_QUERY_FIELD = new ParseField("zero_terms_query"); - public static final ParseField CUTOFF_FREQUENCY_FIELD = new ParseField("cutoff_frequency"); + /** + * @deprecated Since max_optimization optimization landed in 7.0, normal MatchQuery + * will achieve the same result without any configuration. + */ + @Deprecated + public static final ParseField CUTOFF_FREQUENCY_FIELD = + new ParseField("cutoff_frequency").withAllDeprecated(CUTOFF_FREQUENCY_DEPRECATION_MSG); public static final ParseField LENIENT_FIELD = new ParseField("lenient"); public static final ParseField FUZZY_TRANSPOSITIONS_FIELD = new ParseField("fuzzy_transpositions"); public static final ParseField FUZZY_REWRITE_FIELD = new ParseField("fuzzy_rewrite"); @@ -252,7 +262,10 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { * Set a cutoff value in [0..1] (or absolute number >=1) representing the * maximum threshold of a terms document frequency to be considered a low * frequency term. + * + * @deprecated see {@link MatchQueryBuilder#CUTOFF_FREQUENCY_FIELD} for more details */ + @Deprecated public MatchQueryBuilder cutoffFrequency(float cutoff) { this.cutoffFrequency = cutoff; return this; diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index 8200a8068af..2a7c3729fe2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -51,6 +51,10 @@ import java.util.TreeMap; * Same as {@link MatchQueryBuilder} but supports multiple fields. 
*/ public class MultiMatchQueryBuilder extends AbstractQueryBuilder { + + private static final String CUTOFF_FREQUENCY_DEPRECATION_MSG = "you can omit this option, " + + "the [multi_match] query can skip block of documents efficiently if the total number of hits is not tracked"; + public static final String NAME = "multi_match"; public static final MultiMatchQueryBuilder.Type DEFAULT_TYPE = MultiMatchQueryBuilder.Type.BEST_FIELDS; @@ -64,7 +68,8 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder(MoreLikeThisQueryBuilder.NAME, MoreLikeThisQueryBuilder::new, MoreLikeThisQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(WrapperQueryBuilder.NAME, WrapperQueryBuilder::new, WrapperQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(CommonTermsQueryBuilder.NAME, CommonTermsQueryBuilder::new, CommonTermsQueryBuilder::fromXContent)); + registerQuery(new QuerySpec<>(new ParseField(CommonTermsQueryBuilder.NAME).withAllDeprecated(COMMON_TERMS_QUERY_DEPRECATION_MSG), + CommonTermsQueryBuilder::new, CommonTermsQueryBuilder::fromXContent)); registerQuery( new QuerySpec<>(SpanMultiTermQueryBuilder.NAME, SpanMultiTermQueryBuilder::new, SpanMultiTermQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(FunctionScoreQueryBuilder.NAME, FunctionScoreQueryBuilder::new, diff --git a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java index 5e443ec41ed..d02b60c52d5 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java @@ -111,6 +111,30 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase new CommonTermsQueryBuilder(null, "text")); assertEquals("field name is null or empty", e.getMessage()); @@ -146,6 +170,8 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase parseQuery(shortJson)); assertEquals("[common] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage()); + + assertDeprecationWarning(); + } + + private void assertDeprecationWarning() { + assertWarnings("Deprecated field [common] used, replaced by [" + CommonTermsQueryBuilder.COMMON_TERMS_QUERY_DEPRECATION_MSG + "]"); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryParserTests.java b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryParserTests.java index f4e737ea4b0..761520de039 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryParserTests.java @@ -22,10 +22,8 @@ package org.elasticsearch.index.query; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.io.IOException; - public class CommonTermsQueryParserTests extends ESSingleNodeTestCase { - public void testWhenParsedQueryIsNullNoNullPointerExceptionIsThrown() throws IOException { + public void testWhenParsedQueryIsNullNoNullPointerExceptionIsThrown() { final String index = "test-index"; final String type = "test-type"; client() diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index a7aad3dbc3e..f79bbb86242 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -124,10 +124,6 @@ public class MatchQueryBuilderTests extends FullTextQueryTestCase query.parse(Type.PHRASE, STRING_FIELD_NAME, "")); } - + private static class MockGraphAnalyzer extends Analyzer { CannedBinaryTokenStream tokenStream; diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 6590a560935..970a4c3a37e 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -134,9 +134,6 @@ public class MultiMatchQueryBuilderTests extends FullTextQueryTestCase allSupportedQueries = new ArrayList<>(); Collections.addAll(allSupportedQueries, NON_DEPRECATED_QUERIES); Collections.addAll(allSupportedQueries, DEPRECATED_QUERIES); @@ -254,6 +254,7 @@ public class SearchModuleTests extends ESTestCase { Set registeredNonDeprecated = module.getNamedXContents().stream() .filter(e -> e.categoryClass.equals(QueryBuilder.class)) + .filter(e -> e.name.getDeprecatedNames().length == 0) .map(e -> e.name.getPreferredName()) .collect(toSet()); Set registeredAll = module.getNamedXContents().stream() @@ -316,7 +317,6 @@ public class SearchModuleTests extends ESTestCase { private static final String[] NON_DEPRECATED_QUERIES = new String[] { "bool", "boosting", - "common", "constant_score", "dis_max", "exists", @@ -364,7 +364,7 @@ public class SearchModuleTests extends ESTestCase { }; //add here deprecated queries to make sure we log a deprecation warnings when they are used - private static final String[] DEPRECATED_QUERIES = new String[] {}; + private static final String[] DEPRECATED_QUERIES = new String[] {"common"}; /** * Dummy test {@link AggregationBuilder} used to test registering aggregation builders. 
diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/RandomQueryGenerator.java b/server/src/test/java/org/elasticsearch/search/profile/query/RandomQueryGenerator.java index 00b859394c6..ea9ef964153 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/RandomQueryGenerator.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/RandomQueryGenerator.java @@ -22,11 +22,9 @@ package org.elasticsearch.search.profile.query; import org.apache.lucene.util.English; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.CommonTermsQueryBuilder; import org.elasticsearch.index.query.DisMaxQueryBuilder; import org.elasticsearch.index.query.FuzzyQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; -import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; @@ -72,7 +70,7 @@ public class RandomQueryGenerator { } private static QueryBuilder randomTerminalQuery(List stringFields, List numericFields, int numDocs) { - switch (randomIntBetween(0,6)) { + switch (randomIntBetween(0,5)) { case 0: return randomTermQuery(stringFields, numDocs); case 1: @@ -82,10 +80,8 @@ public class RandomQueryGenerator { case 3: return QueryBuilders.matchAllQuery(); case 4: - return randomCommonTermsQuery(stringFields, numDocs); - case 5: return randomFuzzyQuery(stringFields); - case 6: + case 5: return randomIDsQuery(); default: return randomTermQuery(stringFields, numDocs); @@ -169,31 +165,6 @@ public class RandomQueryGenerator { return QueryBuilders.constantScoreQuery(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1)); } - private static QueryBuilder randomCommonTermsQuery(List fields, int numDocs) { - int numTerms = randomInt(numDocs); - - QueryBuilder q = QueryBuilders.commonTermsQuery(randomField(fields), randomQueryString(numTerms)); - if (randomBoolean()) { - ((CommonTermsQueryBuilder)q).boost(randomFloat()); - } - - if (randomBoolean()) { - ((CommonTermsQueryBuilder)q).cutoffFrequency(randomFloat()); - } - - if (randomBoolean()) { - ((CommonTermsQueryBuilder)q).highFreqMinimumShouldMatch(Integer.toString(randomInt(numTerms))) - .highFreqOperator(randomBoolean() ? Operator.AND : Operator.OR); - } - - if (randomBoolean()) { - ((CommonTermsQueryBuilder)q).lowFreqMinimumShouldMatch(Integer.toString(randomInt(numTerms))) - .lowFreqOperator(randomBoolean() ? Operator.AND : Operator.OR); - } - - return q; - } - private static QueryBuilder randomFuzzyQuery(List fields) { QueryBuilder q = QueryBuilders.fuzzyQuery(randomField(fields), randomQueryString(1)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 4abb39bf6a4..095db80e6fe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -164,7 +164,7 @@ public abstract class AbstractQueryTestCase> * parse exception. Some specific objects do not cause any exception as they can hold arbitrary content; they can be * declared by overriding {@link #getObjectsHoldingArbitraryContent()}. 
*/ - public final void testUnknownObjectException() throws IOException { + public void testUnknownObjectException() throws IOException { Set candidates = new HashSet<>(); // Adds the valid query to the list of queries to modify and test candidates.add(createTestQueryBuilder().toString()); From 711de2f59a785649c66f4a0d01e9fec29db35b46 Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Thu, 30 May 2019 10:27:17 -0600 Subject: [PATCH 008/210] Make hashed token ids url safe (#42651) This commit changes the way token ids are hashed so that the output is url safe without requiring encoding. This follows the pattern that we use for document ids that are autogenerated, see UUIDs and the associated classes for additional details. --- .../xpack/core/security/authc/support/Hasher.java | 4 ++-- .../xpack/security/authc/TokenService.java | 2 +- .../xpack/security/authc/TokenServiceTests.java | 10 ++++++++-- .../upgrades/TokenBackwardsCompatibilityIT.java | 2 ++ .../rest-api-spec/test/mixed_cluster/50_token_auth.yml | 6 ++++++ .../test/upgraded_cluster/50_token_auth.yml | 4 ++++ 6 files changed, 23 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java index 5413a38bd62..1b5b65e60c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java @@ -360,14 +360,14 @@ public enum Hasher { public char[] hash(SecureString text) { MessageDigest md = MessageDigests.sha256(); md.update(CharArrays.toUtf8Bytes(text.getChars())); - return Base64.getEncoder().encodeToString(md.digest()).toCharArray(); + return Base64.getUrlEncoder().withoutPadding().encodeToString(md.digest()).toCharArray(); } @Override public boolean verify(SecureString text, char[] hash) { MessageDigest md = MessageDigests.sha256(); md.update(CharArrays.toUtf8Bytes(text.getChars())); - return CharArrays.constantTimeEquals(Base64.getEncoder().encodeToString(md.digest()).toCharArray(), hash); + return CharArrays.constantTimeEquals(Base64.getUrlEncoder().withoutPadding().encodeToString(md.digest()).toCharArray(), hash); } }, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 8d4482a6d58..3a41b1b1ccf 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -181,7 +181,7 @@ public final class TokenService { TimeValue.MINUS_ONE, Property.NodeScope); static final String TOKEN_DOC_TYPE = "token"; - private static final int HASHED_TOKEN_LENGTH = 44; + private static final int HASHED_TOKEN_LENGTH = 43; // UUIDs are 16 bytes encoded base64 without padding, therefore the length is (16 / 3) * 4 + ((16 % 3) * 8 + 5) / 6 chars private static final int TOKEN_LENGTH = 22; private static final String TOKEN_DOC_ID_PREFIX = TOKEN_DOC_TYPE + "_"; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index c3c4f95e2d0..e81cbc0f437 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -60,7 +60,10 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; +import javax.crypto.SecretKey; import java.io.IOException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; import java.security.GeneralSecurityException; import java.time.Clock; import java.time.Instant; @@ -70,8 +73,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -import javax.crypto.SecretKey; - import static java.time.Clock.systemUTC; import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; import static org.elasticsearch.test.ClusterServiceUtils.setState; @@ -722,6 +723,11 @@ public class TokenServiceTests extends ESTestCase { assertThat(authToken, Matchers.nullValue()); } + public void testHashedTokenIsUrlSafe() throws Exception { + final String hashedId = TokenService.hashTokenString(UUIDs.randomBase64UUID()); + assertEquals(hashedId, URLEncoder.encode(hashedId, StandardCharsets.UTF_8.name())); + } + private TokenService createTokenService(Settings settings, Clock clock) throws GeneralSecurityException { return new TokenService(settings, clock, client, licenseState, securityMainIndex, securityTokensIndex, clusterService); } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java index 2245fa3ea1d..0eb0f696581 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.upgrades; import org.apache.http.HttpHeaders; import org.apache.http.HttpHost; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -30,6 +31,7 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; +@AwaitsFix(bugUrl = "need to backport #42651") public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase { private Collection twoClients = null; diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/50_token_auth.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/50_token_auth.yml index f426d9b2525..a34128579f3 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/50_token_auth.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/50_token_auth.yml @@ -2,6 +2,8 @@ "Get the indexed token and use if to authenticate": - skip: features: headers + version: " - 7.99.99" + reason: "Need to backport PR #42651" - do: cluster.health: @@ -59,6 +61,8 @@ "Get the indexed refreshed access token and use if to authenticate": - skip: features: headers + version: " - 7.99.99" + reason: "Need to backport PR #42651" - do: get: @@ -111,6 +115,8 @@ "Get the indexed refresh token and use it to get another access token and authenticate": - skip: features: headers + version: " - 7.99.99" + reason: "Need to backport PR #42651" - do: 
get: diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml index 430f94c1064..64897707c15 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml @@ -2,6 +2,8 @@ "Get the indexed token and use if to authenticate": - skip: features: headers + version: " - 8.0.0" + reason: "Need to backport PR #42651" - do: cluster.health: @@ -49,6 +51,8 @@ "Get the indexed refresh token and use if to get another access token and authenticate": - skip: features: headers + version: " - 8.0.0" + reason: "Need to backport PR #42651" - do: get: From b5527b32781017f9be3e5a8dd4019b771186fc66 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Thu, 30 May 2019 12:05:35 -0500 Subject: [PATCH 009/210] [ML] [Data Frame] add support for weighted_avg agg (#42646) (#42714) --- .../integration/DataFramePivotRestIT.java | 39 +++++++++++++++++++ .../transforms/pivot/Aggregations.java | 1 + .../transforms/pivot/SchemaUtil.java | 3 +- .../transforms/pivot/AggregationsTests.java | 4 ++ .../transforms/pivot/PivotTests.java | 10 +++++ 5 files changed, 56 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index a0bec6ec13c..3c661a0f4ac 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -473,6 +473,45 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase { assertEquals((4 + 15), Double.valueOf(latlon[1]), 0.000001); } + public void testPivotWithWeightedAvgAgg() throws Exception { + String transformId = "weightedAvgAggTransform"; + String dataFrameIndex = "weighted_avg_pivot_reviews"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, dataFrameIndex); + + final Request createDataframeTransformRequest = createRequestWithAuth("PUT", DATAFRAME_ENDPOINT + transformId, + BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); + + String config = "{" + + " \"source\": {\"index\":\"" + REVIEWS_INDEX_NAME + "\"}," + + " \"dest\": {\"index\":\"" + dataFrameIndex + "\"},"; + + config += " \"pivot\": {" + + " \"group_by\": {" + + " \"reviewer\": {" + + " \"terms\": {" + + " \"field\": \"user_id\"" + + " } } }," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"weighted_avg\": {" + + " \"value\": {\"field\": \"stars\"}," + + " \"weight\": {\"field\": \"stars\"}" + + "} } } }" + + "}"; + + createDataframeTransformRequest.setJsonEntity(config); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); + assertTrue(indexExists(dataFrameIndex)); + + Map searchResult = getAsMap(dataFrameIndex + "/_search?q=reviewer:user_4"); + assertEquals(1, 
XContentMapValues.extractValue("hits.total.value", searchResult)); + Number actual = (Number) ((List) XContentMapValues.extractValue("hits.hits._source.avg_rating", searchResult)).get(0); + assertEquals(4.47169811, actual.doubleValue(), 0.000001); + } + private void assertOnePivotValue(String query, double expected) throws IOException { Map searchResult = getAsMap(query); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java index 615c9b2e8d2..4e74f9085e3 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java @@ -37,6 +37,7 @@ public final class Aggregations { SUM("sum", SOURCE), GEO_CENTROID("geo_centroid", "geo_point"), SCRIPTED_METRIC("scripted_metric", DYNAMIC), + WEIGHTED_AVG("weighted_avg", DYNAMIC), BUCKET_SCRIPT("bucket_script", DYNAMIC); private final String aggregationType; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java index 304f35b8c4c..4ac77c38f7d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java @@ -17,6 +17,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.dataframe.transforms.pivot.PivotConfig; @@ -77,7 +78,7 @@ public final class SchemaUtil { ValuesSourceAggregationBuilder valueSourceAggregation = (ValuesSourceAggregationBuilder) agg; aggregationSourceFieldNames.put(valueSourceAggregation.getName(), valueSourceAggregation.field()); aggregationTypes.put(valueSourceAggregation.getName(), valueSourceAggregation.getType()); - } else if(agg instanceof ScriptedMetricAggregationBuilder) { + } else if(agg instanceof ScriptedMetricAggregationBuilder || agg instanceof MultiValuesSourceAggregationBuilder) { aggregationTypes.put(agg.getName(), agg.getType()); } else { // execution should not reach this point diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java index 8443699430a..ace42cb65fc 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java @@ -49,5 +49,9 @@ public class AggregationsTests extends ESTestCase { // bucket_script assertEquals("_dynamic", Aggregations.resolveTargetMapping("bucket_script", null)); 
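        // "_dynamic" means no concrete destination mapping is derived from the source field; the
        // destination field type is effectively left to dynamic mapping instead. The weighted_avg
        // assertions added below follow the same convention as scripted_metric and bucket_script,
        // since the aggregation is registered with the DYNAMIC target mapping in Aggregations.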
assertEquals("_dynamic", Aggregations.resolveTargetMapping("bucket_script", "int")); + + // weighted_avg + assertEquals("_dynamic", Aggregations.resolveTargetMapping("weighted_avg", null)); + assertEquals("_dynamic", Aggregations.resolveTargetMapping("weighted_avg", "double")); } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java index 20ea84502ed..d54cbad97f7 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java @@ -215,6 +215,16 @@ public class PivotTests extends ESTestCase { "\"buckets_path\":{\"param_1\":\"other_bucket\"}," + "\"script\":\"return params.param_1\"}}}"); } + if (agg.equals(AggregationType.WEIGHTED_AVG.getName())) { + return parseAggregations("{\n" + + "\"pivot_weighted_avg\": {\n" + + " \"weighted_avg\": {\n" + + " \"value\": {\"field\": \"values\"},\n" + + " \"weight\": {\"field\": \"weights\"}\n" + + " }\n" + + "}\n" + + "}"); + } return parseAggregations("{\n" + " \"pivot_" + agg + "\": {\n" + " \"" + agg + "\": {\n" + " \"field\": \"values\"\n" + " }\n" + " }" + "}"); } From d14799f0a584d97696a46e5b1110ac88af21df8d Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 30 May 2019 16:54:01 +0100 Subject: [PATCH 010/210] Prevent merging nodes' data paths (#42665) Today Elasticsearch does not prevent you from reconfiguring a node's `path.data` to point to data paths that previously belonged to more than one node. There's no good reason to be able to do this, and the consequences can be quietly disastrous. Furthermore, #42489 might result in a user trying to split up a previously-shared collection of data paths by hand and there's definitely scope for mixing the paths up across nodes when doing this. This change adds a check during startup to ensure that each data path belongs to the same node. --- .../elasticsearch/env/NodeEnvironment.java | 16 +++++++++ .../elasticsearch/env/NodeEnvironmentIT.java | 34 ++++++++++++++++++- 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index c149f262046..bcd6e0577cd 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -70,6 +70,7 @@ import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -403,10 +404,25 @@ public final class NodeEnvironment implements Closeable { private static NodeMetaData loadOrCreateNodeMetaData(Settings settings, Logger logger, NodePath... 
nodePaths) throws IOException { final Path[] paths = Arrays.stream(nodePaths).map(np -> np.path).toArray(Path[]::new); + + final Set nodeIds = new HashSet<>(); + for (final Path path : paths) { + final NodeMetaData metaData = NodeMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, path); + if (metaData != null) { + nodeIds.add(metaData.nodeId()); + } + } + if (nodeIds.size() > 1) { + throw new IllegalStateException( + "data paths " + Arrays.toString(paths) + " belong to multiple nodes with IDs " + nodeIds); + } + NodeMetaData metaData = NodeMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, paths); if (metaData == null) { + assert nodeIds.isEmpty() : nodeIds; metaData = new NodeMetaData(generateNodeId(settings), Version.CURRENT); } else { + assert nodeIds.equals(Collections.singleton(metaData.nodeId())) : nodeIds + " doesn't match " + metaData; metaData = metaData.upgradeToCurrentVersion(); } diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentIT.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentIT.java index 74de578426f..daddd74ed90 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentIT.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentIT.java @@ -26,7 +26,10 @@ import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; +import java.io.IOException; import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -35,7 +38,7 @@ import static org.hamcrest.Matchers.startsWith; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class NodeEnvironmentIT extends ESIntegTestCase { - public void testStartFailureOnDataForNonDataNode() throws Exception { + public void testStartFailureOnDataForNonDataNode() { final String indexName = "test-fail-on-data"; logger.info("--> starting one node"); @@ -123,4 +126,33 @@ public class NodeEnvironmentIT extends ESIntegTestCase { assertThat(illegalStateException.getMessage(), allOf(startsWith("cannot upgrade a node from version ["), endsWith("] directly to version [" + Version.CURRENT + "]"))); } + + public void testFailsToStartOnDataPathsFromMultipleNodes() throws IOException { + final List nodes = internalCluster().startNodes(2); + ensureStableCluster(2); + + final List node0DataPaths = Environment.PATH_DATA_SETTING.get(internalCluster().dataPathSettings(nodes.get(0))); + final List node1DataPaths = Environment.PATH_DATA_SETTING.get(internalCluster().dataPathSettings(nodes.get(1))); + + final List allDataPaths = new ArrayList<>(node0DataPaths); + allDataPaths.addAll(node1DataPaths); + + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodes.get(1))); + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodes.get(0))); + + final IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, + () -> internalCluster().startNode(Settings.builder().putList(Environment.PATH_DATA_SETTING.getKey(), allDataPaths))); + + assertThat(illegalStateException.getMessage(), containsString("belong to multiple nodes with IDs")); + + final List node0DataPathsPlusOne = new ArrayList<>(node0DataPaths); + node0DataPathsPlusOne.add(createTempDir().toString()); + internalCluster().startNode(Settings.builder().putList(Environment.PATH_DATA_SETTING.getKey(), node0DataPathsPlusOne)); + + final List 
node1DataPathsPlusOne = new ArrayList<>(node1DataPaths); + node1DataPathsPlusOne.add(createTempDir().toString()); + internalCluster().startNode(Settings.builder().putList(Environment.PATH_DATA_SETTING.getKey(), node1DataPathsPlusOne)); + + ensureStableCluster(2); + } } From c1816354ed01d537c9a4f0608ab67aff2bb2cb9e Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 30 May 2019 10:29:42 -0700 Subject: [PATCH 011/210] [Backport] Improve build configuration time (#42674) --- build.gradle | 9 +- buildSrc/build.gradle | 49 +- .../elasticsearch/gradle/BuildPlugin.groovy | 755 +++++++----------- .../gradle/plugin/PluginBuildPlugin.groovy | 5 +- .../gradle/precommit/PrecommitTasks.groovy | 22 +- .../gradle/test/ClusterFormationTasks.groovy | 27 +- .../gradle/test/RestIntegTestTask.groovy | 37 +- .../test/StandaloneRestTestPlugin.groovy | 8 +- .../elasticsearch/gradle/JdkJarHellCheck.java | 0 .../gradle/LazyFileOutputStream.java | 0 .../org/elasticsearch/gradle/LoggedExec.java | 0 .../org/elasticsearch/gradle/Version.java | 0 .../gradle/VersionProperties.java | 0 .../info/GenerateGlobalBuildInfoTask.java | 276 +++++++ .../gradle/info/GlobalBuildInfoPlugin.java | 198 +++++ .../gradle/info/GlobalInfoExtension.java | 12 + .../elasticsearch/gradle/info/JavaHome.java | 35 + .../gradle/info/PrintGlobalBuildInfoTask.java | 84 ++ .../gradle/precommit/ThirdPartyAuditTask.java | 11 +- ...elasticsearch.global-build-info.properties | 1 + .../testKit/elasticsearch.build/build.gradle | 1 + distribution/tools/plugin-cli/build.gradle | 6 +- libs/core/build.gradle | 8 +- modules/transport-netty4/build.gradle | 14 +- plugins/discovery-azure-classic/build.gradle | 234 +++--- plugins/discovery-ec2/build.gradle | 12 +- plugins/ingest-attachment/build.gradle | 10 +- plugins/repository-hdfs/build.gradle | 6 +- plugins/repository-s3/build.gradle | 214 ++--- plugins/transport-nio/build.gradle | 14 +- server/build.gradle | 12 +- x-pack/plugin/ccr/qa/restart/build.gradle | 2 +- x-pack/plugin/security/build.gradle | 30 +- x-pack/plugin/security/cli/build.gradle | 26 +- .../sql/qa/security/with-ssl/build.gradle | 14 +- x-pack/plugin/watcher/build.gradle | 78 +- x-pack/qa/full-cluster-restart/build.gradle | 32 +- .../reindex-tests-with-security/build.gradle | 6 +- x-pack/qa/rolling-upgrade/build.gradle | 32 +- 39 files changed, 1333 insertions(+), 947 deletions(-) rename buildSrc/src/main/{minimumRuntime => java}/org/elasticsearch/gradle/JdkJarHellCheck.java (100%) rename buildSrc/src/main/{minimumRuntime => java}/org/elasticsearch/gradle/LazyFileOutputStream.java (100%) rename buildSrc/src/main/{minimumRuntime => java}/org/elasticsearch/gradle/LoggedExec.java (100%) rename buildSrc/src/main/{minimumRuntime => java}/org/elasticsearch/gradle/Version.java (100%) rename buildSrc/src/main/{minimumRuntime => java}/org/elasticsearch/gradle/VersionProperties.java (100%) create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/info/GenerateGlobalBuildInfoTask.java create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalInfoExtension.java create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/info/JavaHome.java create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/info/PrintGlobalBuildInfoTask.java create mode 100644 buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.global-build-info.properties diff --git a/build.gradle b/build.gradle index 93659101d84..2a79de7e4fc 
100644 --- a/build.gradle +++ b/build.gradle @@ -31,6 +31,7 @@ import org.gradle.plugins.ide.eclipse.model.SourceFolder plugins { id 'com.gradle.build-scan' version '2.2.1' id 'base' + id 'elasticsearch.global-build-info' } if (properties.get("org.elasticsearch.acceptScanTOS", "false") == "true") { buildScan { @@ -263,7 +264,7 @@ allprojects { } project.afterEvaluate { - configurations.all { + configurations.matching { it.canBeResolved }.all { resolutionStrategy.dependencySubstitution { DependencySubstitutions subs -> projectSubstitutions.each { k,v -> subs.substitute(subs.module(k)).with(subs.project(v)) @@ -337,7 +338,7 @@ gradle.projectsEvaluated { if (tasks.findByPath('test') != null && tasks.findByPath('integTest') != null) { integTest.mustRunAfter test } - configurations.all { Configuration configuration -> + configurations.matching { it.canBeResolved }.all { Configuration configuration -> dependencies.all { Dependency dep -> Project upstreamProject = dependencyToProject(dep) if (upstreamProject != null) { @@ -593,7 +594,3 @@ allprojects { } } } - - - - diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 1d08fa87e35..d3a16f55277 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -65,39 +65,10 @@ processResources { if (JavaVersion.current() < JavaVersion.VERSION_11) { throw new GradleException('At least Java 11 is required to build elasticsearch gradle tools') } -// Gradle 4.10 does not support setting this to 11 yet -targetCompatibility = "10" -sourceCompatibility = "10" - -// We have a few classes that need to be compiled for older java versions because these are used to run checks against -// those -sourceSets { - minimumRuntime { - // We only want Java here, but the Groovy doesn't configure javadoc correctly if we don't define this as groovy - groovy { - srcDirs = ['src/main/minimumRuntime'] - } - } -} -compileMinimumRuntimeGroovy { - // We can't use BuildPlugin here, so read from file - String minimumRuntimeVersion = file('src/main/resources/minimumRuntimeVersion').text.trim() - targetCompatibility = minimumRuntimeVersion - sourceCompatibility = minimumRuntimeVersion -} -dependencies { - if (project.ext.has("isEclipse") == false || project.ext.isEclipse == false) { - // eclipse is confused if this is set explicitly - compile sourceSets.minimumRuntime.output - } - minimumRuntimeCompile "junit:junit:${props.getProperty('junit')}" - minimumRuntimeCompile localGroovy() - minimumRuntimeCompile gradleApi() -} -jar { - from sourceSets.minimumRuntime.output -} +// Keep compatibility with Java 8 for external users of build-tools that haven't migrated to Java 11 +targetCompatibility = '8' +sourceCompatibility = '8' /***************************************************************************** * Dependencies used by the entire build * @@ -117,7 +88,7 @@ dependencies { compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE.... 
compile 'org.apache.rat:apache-rat:0.11' compile "org.elasticsearch:jna:4.5.1" - compile 'com.github.jengelman.gradle.plugins:shadow:2.0.4' + compile 'com.github.jengelman.gradle.plugins:shadow:4.0.3' compile 'de.thetaphi:forbiddenapis:2.6' compile 'com.avast.gradle:gradle-docker-compose-plugin:0.8.12' testCompile "junit:junit:${props.getProperty('junit')}" @@ -162,7 +133,6 @@ if (project != rootProject) { dependenciesInfo.enabled = false forbiddenApisMain.enabled = false forbiddenApisTest.enabled = false - forbiddenApisMinimumRuntime.enabled = false jarHell.enabled = false thirdPartyAudit.enabled = false @@ -184,16 +154,7 @@ if (project != rootProject) { from configurations.distribution into localDownloads } - - test { - // The test task is configured to runtimeJava version, but build-tools doesn't support all of them, so test - // with compiler instead on the ones that are too old. - if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_10) { - executable = "${project.compilerJavaHome}/bin/java" - } - } - - // This can't be an RandomizedTestingTask because we can't yet reference it + task integTest(type: Test) { // integration test requires the local testing repo for example plugin builds dependsOn project.rootProject.allprojects.collect { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 1f713e9f1be..894496c8329 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -19,15 +19,25 @@ package org.elasticsearch.gradle import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin +import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar +import groovy.transform.CompileDynamic +import groovy.transform.CompileStatic import org.apache.commons.io.IOUtils -import org.apache.tools.ant.taskdefs.condition.Os +import org.apache.tools.ant.taskdefs.Java import org.eclipse.jgit.lib.Constants import org.eclipse.jgit.lib.RepositoryBuilder +import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin +import org.elasticsearch.gradle.info.GlobalInfoExtension +import org.elasticsearch.gradle.info.JavaHome +import org.elasticsearch.gradle.precommit.DependencyLicensesTask import org.elasticsearch.gradle.precommit.PrecommitTasks import org.elasticsearch.gradle.test.ErrorReportingTestListener +import org.elasticsearch.gradle.testclusters.ElasticsearchCluster +import org.gradle.api.Action import org.gradle.api.GradleException import org.gradle.api.InvalidUserDataException import org.gradle.api.JavaVersion +import org.gradle.api.NamedDomainObjectContainer import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.Task @@ -41,22 +51,34 @@ import org.gradle.api.artifacts.ResolvedArtifact import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.api.artifacts.repositories.ArtifactRepository import org.gradle.api.artifacts.repositories.IvyArtifactRepository +import org.gradle.api.artifacts.repositories.IvyPatternRepositoryLayout import org.gradle.api.artifacts.repositories.MavenArtifactRepository import org.gradle.api.credentials.HttpHeaderCredentials import org.gradle.api.execution.TaskActionListener import org.gradle.api.execution.TaskExecutionGraph +import org.gradle.api.file.CopySpec +import org.gradle.api.plugins.BasePlugin +import org.gradle.api.plugins.BasePluginConvention +import org.gradle.api.plugins.ExtraPropertiesExtension import 
org.gradle.api.plugins.JavaPlugin +import org.gradle.api.plugins.JavaPluginExtension +import org.gradle.api.publish.PublishingExtension import org.gradle.api.publish.maven.MavenPublication import org.gradle.api.publish.maven.plugins.MavenPublishPlugin import org.gradle.api.publish.maven.tasks.GenerateMavenPom import org.gradle.api.tasks.SourceSet +import org.gradle.api.tasks.SourceSetContainer import org.gradle.api.tasks.bundling.Jar import org.gradle.api.tasks.compile.GroovyCompile import org.gradle.api.tasks.compile.JavaCompile import org.gradle.api.tasks.javadoc.Javadoc import org.gradle.api.tasks.testing.Test +import org.gradle.api.tasks.testing.logging.TestLoggingContainer import org.gradle.authentication.http.HttpHeaderAuthentication +import org.gradle.external.javadoc.CoreJavadocOptions import org.gradle.internal.jvm.Jvm +import org.gradle.language.base.plugins.LifecycleBasePlugin +import org.gradle.process.CommandLineArgumentProvider import org.gradle.process.ExecResult import org.gradle.process.ExecSpec import org.gradle.util.GradleVersion @@ -64,18 +86,19 @@ import org.gradle.util.GradleVersion import java.nio.charset.StandardCharsets import java.time.ZoneOffset import java.time.ZonedDateTime -import java.util.concurrent.ExecutorService -import java.util.concurrent.Executors -import java.util.concurrent.Future import java.util.regex.Matcher /** * Encapsulates build configuration for elasticsearch projects. */ +@CompileStatic class BuildPlugin implements Plugin { @Override void apply(Project project) { + // make sure the global build info plugin is applied to the root project + project.rootProject.pluginManager.apply(GlobalBuildInfoPlugin) + if (project.pluginManager.hasPlugin('elasticsearch.standalone-rest-test')) { throw new InvalidUserDataException('elasticsearch.standalone-test, ' + 'elasticsearch.standalone-rest-test, and elasticsearch.build ' @@ -105,9 +128,8 @@ class BuildPlugin implements Plugin { project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask) setupSeed(project) - globalBuildInfo(project) configureRepositories(project) - project.ext.versions = VersionProperties.versions + project.extensions.getByType(ExtraPropertiesExtension).set('versions', VersionProperties.versions) configureInputNormalization(project) configureSourceSets(project) configureCompile(project) @@ -120,174 +142,36 @@ class BuildPlugin implements Plugin { // Common config when running with a FIPS-140 runtime JVM // Need to do it here to support external plugins - if (project.ext.inFipsJvm) { - project.tasks.withType(Test) { - systemProperty 'javax.net.ssl.trustStorePassword', 'password' - systemProperty 'javax.net.ssl.keyStorePassword', 'password' - } - project.pluginManager.withPlugin("elasticsearch.testclusters") { - project.testClusters.all { - systemProperty 'javax.net.ssl.trustStorePassword', 'password' - systemProperty 'javax.net.ssl.keyStorePassword', 'password' - } - } - } + if (project == project.rootProject) { + GlobalInfoExtension globalInfo = project.extensions.getByType(GlobalInfoExtension) - } - - - - /** Performs checks on the build environment and prints information about the build environment. 
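     * (The equivalent environment checks and build info reporting now live in the new GlobalBuildInfoPlugin
     * and GenerateGlobalBuildInfoTask under buildSrc/src/main/java/org/elasticsearch/gradle/info; BuildPlugin
     * applies GlobalBuildInfoPlugin to the root project instead.)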
*/ - static void globalBuildInfo(Project project) { - if (project.rootProject.ext.has('buildChecksDone') == false) { - JavaVersion minimumRuntimeVersion = JavaVersion.toVersion( - BuildPlugin.class.getClassLoader().getResourceAsStream("minimumRuntimeVersion").text.trim() - ) - JavaVersion minimumCompilerVersion = JavaVersion.toVersion( - BuildPlugin.class.getClassLoader().getResourceAsStream("minimumCompilerVersion").text.trim() - ) - String compilerJavaHome = findCompilerJavaHome() - String runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome) - File gradleJavaHome = Jvm.current().javaHome - - String javaVendor = System.getProperty('java.vendor') - String gradleJavaVersion = System.getProperty('java.version') - String gradleJavaVersionDetails = "${javaVendor} ${gradleJavaVersion}" + - " [${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]" - - String compilerJavaVersionDetails = gradleJavaVersionDetails - JavaVersion compilerJavaVersionEnum = JavaVersion.current() - if (new File(compilerJavaHome).canonicalPath != gradleJavaHome.canonicalPath) { - compilerJavaVersionDetails = findJavaVersionDetails(project, compilerJavaHome) - compilerJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, compilerJavaHome)) - } - - String runtimeJavaVersionDetails = gradleJavaVersionDetails - JavaVersion runtimeJavaVersionEnum = JavaVersion.current() - if (new File(runtimeJavaHome).canonicalPath != gradleJavaHome.canonicalPath) { - runtimeJavaVersionDetails = findJavaVersionDetails(project, runtimeJavaHome) - runtimeJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, runtimeJavaHome)) - } - - boolean inFipsJvm = false - if (new File(runtimeJavaHome).canonicalPath != gradleJavaHome.canonicalPath) { - // We don't expect Gradle to be running in a FIPS JVM - String inFipsJvmScript = 'print(java.security.Security.getProviders()[0].name.toLowerCase().contains("fips"));' - inFipsJvm = Boolean.parseBoolean(runJavaAsScript(project, runtimeJavaHome, inFipsJvmScript)) - } - - // Build debugging info - println '=======================================' - println 'Elasticsearch Build Hamster says Hello!' 
- println " Gradle Version : ${project.gradle.gradleVersion}" - println " OS Info : ${System.getProperty('os.name')} ${System.getProperty('os.version')} (${System.getProperty('os.arch')})" - if (gradleJavaVersionDetails != compilerJavaVersionDetails || gradleJavaVersionDetails != runtimeJavaVersionDetails) { - println " Compiler JDK Version : ${compilerJavaVersionEnum} (${compilerJavaVersionDetails})" - println " Compiler java.home : ${compilerJavaHome}" - println " Runtime JDK Version : ${runtimeJavaVersionEnum} (${runtimeJavaVersionDetails})" - println " Runtime java.home : ${runtimeJavaHome}" - println " Gradle JDK Version : ${JavaVersion.toVersion(gradleJavaVersion)} (${gradleJavaVersionDetails})" - println " Gradle java.home : ${gradleJavaHome}" - } else { - println " JDK Version : ${JavaVersion.toVersion(gradleJavaVersion)} (${gradleJavaVersionDetails})" - println " JAVA_HOME : ${gradleJavaHome}" - } - println " Random Testing Seed : ${project.testSeed}" - println '=======================================' - - // enforce Java version - if (compilerJavaVersionEnum < minimumCompilerVersion) { - final String message = - "the compiler java.home must be set to a JDK installation directory for Java ${minimumCompilerVersion}" + - " but is [${compilerJavaHome}] corresponding to [${compilerJavaVersionEnum}]" - throw new GradleException(message) - } - - if (runtimeJavaVersionEnum < minimumRuntimeVersion) { - final String message = - "the runtime java.home must be set to a JDK installation directory for Java ${minimumRuntimeVersion}" + - " but is [${runtimeJavaHome}] corresponding to [${runtimeJavaVersionEnum}]" - throw new GradleException(message) - } - - final Map javaVersions = [:] - for (int version = 8; version <= Integer.parseInt(minimumCompilerVersion.majorVersion); version++) { - if(System.getenv(getJavaHomeEnvVarName(version.toString())) != null) { - javaVersions.put(version, findJavaHome(version.toString())); - } - } - - final int numberOfPhysicalCores = numberOfPhysicalCores(project.rootProject) - if (javaVersions.isEmpty() == false) { - - ExecutorService exec = Executors.newFixedThreadPool(numberOfPhysicalCores) - Set> results = new HashSet<>() - - javaVersions.entrySet().stream() - .filter { it.getValue() != null } - .forEach { javaVersionEntry -> - results.add(exec.submit { - final String javaHome = javaVersionEntry.getValue() - final int version = javaVersionEntry.getKey() - if (project.file(javaHome).exists() == false) { - throw new GradleException("Invalid JAVA${version}_HOME=${javaHome} location does not exist") + // wait until global info is populated because we don't know if we are running in a fips jvm until execution time + globalInfo.ready { + project.subprojects { Project subproject -> + ExtraPropertiesExtension ext = subproject.extensions.getByType(ExtraPropertiesExtension) + // Common config when running with a FIPS-140 runtime JVM + if (ext.has('inFipsJvm') && ext.get('inFipsJvm')) { + subproject.tasks.withType(Test) { Test task -> + task.systemProperty 'javax.net.ssl.trustStorePassword', 'password' + task.systemProperty 'javax.net.ssl.keyStorePassword', 'password' } - - JavaVersion javaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, javaHome)) - final JavaVersion expectedJavaVersionEnum = version < 9 ? - JavaVersion.toVersion("1." 
+ version) : - JavaVersion.toVersion(Integer.toString(version)) - - if (javaVersionEnum != expectedJavaVersionEnum) { - final String message = - "the environment variable JAVA" + version + "_HOME must be set to a JDK installation directory for Java" + - " ${expectedJavaVersionEnum} but is [${javaHome}] corresponding to [${javaVersionEnum}]" - throw new GradleException(message) + project.pluginManager.withPlugin("elasticsearch.testclusters") { + NamedDomainObjectContainer testClusters = subproject.extensions.getByName('testClusters') as NamedDomainObjectContainer + testClusters.all { ElasticsearchCluster cluster -> + cluster.systemProperty 'javax.net.ssl.trustStorePassword', 'password' + cluster.systemProperty 'javax.net.ssl.keyStorePassword', 'password' + } } - }) - } - - project.gradle.taskGraph.whenReady { - try { - results.forEach { it.get() } - } finally { - exec.shutdown(); } } } - - project.rootProject.ext.compilerJavaHome = compilerJavaHome - project.rootProject.ext.runtimeJavaHome = runtimeJavaHome - project.rootProject.ext.compilerJavaVersion = compilerJavaVersionEnum - project.rootProject.ext.runtimeJavaVersion = runtimeJavaVersionEnum - project.rootProject.ext.isRuntimeJavaHomeSet = compilerJavaHome.equals(runtimeJavaHome) == false - project.rootProject.ext.javaVersions = javaVersions - project.rootProject.ext.buildChecksDone = true - project.rootProject.ext.minimumCompilerVersion = minimumCompilerVersion - project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion - project.rootProject.ext.inFipsJvm = inFipsJvm - project.rootProject.ext.gradleJavaVersion = JavaVersion.toVersion(gradleJavaVersion) - project.rootProject.ext.java9Home = "${-> findJavaHome("9")}" - project.rootProject.ext.defaultParallel = numberOfPhysicalCores } - - project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion - project.sourceCompatibility = project.rootProject.ext.minimumRuntimeVersion - - // set java home for each project, so they dont have to find it in the root project - project.ext.compilerJavaHome = project.rootProject.ext.compilerJavaHome - project.ext.runtimeJavaHome = project.rootProject.ext.runtimeJavaHome - project.ext.compilerJavaVersion = project.rootProject.ext.compilerJavaVersion - project.ext.runtimeJavaVersion = project.rootProject.ext.runtimeJavaVersion - project.ext.isRuntimeJavaHomeSet = project.rootProject.ext.isRuntimeJavaHomeSet - project.ext.javaVersions = project.rootProject.ext.javaVersions - project.ext.inFipsJvm = project.rootProject.ext.inFipsJvm - project.ext.gradleJavaVersion = project.rootProject.ext.gradleJavaVersion - project.ext.java9Home = project.rootProject.ext.java9Home } static void requireDocker(final Task task) { final Project rootProject = task.project.rootProject + ExtraPropertiesExtension ext = rootProject.extensions.getByType(ExtraPropertiesExtension) + if (rootProject.hasProperty('requiresDocker') == false) { /* * This is our first time encountering a task that requires Docker. 
We will add an extension that will let us track the tasks @@ -315,11 +199,11 @@ class BuildPlugin implements Plugin { throw new IllegalArgumentException( "expected build.docker to be unset or one of \"true\" or \"false\" but was [" + buildDockerProperty + "]") } - rootProject.rootProject.ext.buildDocker = buildDocker - rootProject.rootProject.ext.requiresDocker = [] + + ext.set('buildDocker', buildDocker) + ext.set('requiresDocker', []) rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph -> - final List tasks = - ((List)rootProject.requiresDocker).findAll { taskGraph.hasTask(it) }.collect { " ${it.path}".toString()} + final List tasks = taskGraph.allTasks.intersect(ext.get('requiresDocker') as List).collect { " ${it.path}".toString()} if (tasks.isEmpty() == false) { /* * There are tasks in the task graph that require Docker. Now we are failing because either the Docker binary does not @@ -372,8 +256,9 @@ class BuildPlugin implements Plugin { } } } - if (rootProject.buildDocker) { - rootProject.requiresDocker.add(task) + + if (ext.get('buildDocker')) { + (ext.get('requiresDocker') as List).add(task) } else { task.enabled = false } @@ -401,130 +286,48 @@ class BuildPlugin implements Plugin { + "or by passing -Dbuild.docker=false") } - private static String findCompilerJavaHome() { - String compilerJavaHome = System.getenv('JAVA_HOME') - final String compilerJavaProperty = System.getProperty('compiler.java') - if (compilerJavaProperty != null) { - compilerJavaHome = findJavaHome(compilerJavaProperty) - } - if (compilerJavaHome == null) { - // if JAVA_HOME does not set,so we use the JDK that Gradle was run with. - return Jvm.current().javaHome - } - return compilerJavaHome - } - - private static String findJavaHome(String version) { - String versionedVarName = getJavaHomeEnvVarName(version) - String versionedJavaHome = System.getenv(versionedVarName); - if (versionedJavaHome == null) { - throw new GradleException( - "$versionedVarName must be set to build Elasticsearch. " + - "Note that if the variable was just set you might have to run `./gradlew --stop` for " + - "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details." - ) - } - return versionedJavaHome - } - - private static String getJavaHomeEnvVarName(String version) { - return 'JAVA' + version + '_HOME' - } - /** Add a check before gradle execution phase which ensures java home for the given java version is set. 
*/ static void requireJavaHome(Task task, int version) { - Project rootProject = task.project.rootProject // use root project for global accounting + // use root project for global accounting + Project rootProject = task.project.rootProject + ExtraPropertiesExtension ext = rootProject.extensions.getByType(ExtraPropertiesExtension) + if (rootProject.hasProperty('requiredJavaVersions') == false) { - rootProject.rootProject.ext.requiredJavaVersions = [:] - rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph -> + ext.set('requiredJavaVersions', [:]) + rootProject.gradle.taskGraph.whenReady({ TaskExecutionGraph taskGraph -> List messages = [] - for (entry in rootProject.requiredJavaVersions) { - if (rootProject.javaVersions.get(entry.key) != null) { + Map> requiredJavaVersions = (Map>) ext.get('requiredJavaVersions') + for (Map.Entry> entry : requiredJavaVersions) { + List javaVersions = ext.get('javaVersions') as List + if (javaVersions.find { it.version == entry.key } != null) { continue } - List tasks = entry.value.findAll { taskGraph.hasTask(it) }.collect { " ${it.path}" } + List tasks = entry.value.findAll { taskGraph.hasTask(it) }.collect { " ${it.path}".toString() } if (tasks.isEmpty() == false) { - messages.add("JAVA${entry.key}_HOME required to run tasks:\n${tasks.join('\n')}") + messages.add("JAVA${entry.key}_HOME required to run tasks:\n${tasks.join('\n')}".toString()) } } if (messages.isEmpty() == false) { throw new GradleException(messages.join('\n')) } - rootProject.rootProject.ext.requiredJavaVersions = null // reset to null to indicate the pre-execution checks have executed - } - } else if (rootProject.rootProject.requiredJavaVersions == null) { + ext.set('requiredJavaVersions', null) // reset to null to indicate the pre-execution checks have executed + }) + } else if (ext.has('requiredJavaVersions') == false || ext.get('requiredJavaVersions') == null) { // check directly if the version is present since we are already executing - if (rootProject.javaVersions.get(version) == null) { + List javaVersions = ext.get('javaVersions') as List + if (javaVersions.find { it.version == version } == null) { throw new GradleException("JAVA${version}_HOME required to run task:\n${task}") } } else { - rootProject.requiredJavaVersions.getOrDefault(version, []).add(task) + (ext.get('requiredJavaVersions') as Map>).getOrDefault(version, []).add(task) } } /** A convenience method for getting java home for a version of java and requiring that version for the given task to execute */ static String getJavaHome(final Task task, final int version) { requireJavaHome(task, version) - return task.project.javaVersions.get(version) - } - - private static String findRuntimeJavaHome(final String compilerJavaHome) { - String runtimeJavaProperty = System.getProperty("runtime.java") - if (runtimeJavaProperty != null) { - return findJavaHome(runtimeJavaProperty) - } - return System.getenv('RUNTIME_JAVA_HOME') ?: compilerJavaHome - } - - /** Finds printable java version of the given JAVA_HOME */ - private static String findJavaVersionDetails(Project project, String javaHome) { - String versionInfoScript = 'print(' + - 'java.lang.System.getProperty("java.vendor") + " " + java.lang.System.getProperty("java.version") + ' + - '" [" + java.lang.System.getProperty("java.vm.name") + " " + java.lang.System.getProperty("java.vm.version") + "]");' - return runJavaAsScript(project, javaHome, versionInfoScript).trim() - } - - /** Finds the parsable java specification version */ - private static String 
findJavaSpecificationVersion(Project project, String javaHome) { - String versionScript = 'print(java.lang.System.getProperty("java.specification.version"));' - return runJavaAsScript(project, javaHome, versionScript) - } - - private static String findJavaVendor(Project project, String javaHome) { - String vendorScript = 'print(java.lang.System.getProperty("java.vendor"));' - return runJavaAsScript(project, javaHome, vendorScript) - } - - /** Finds the parsable java specification version */ - private static String findJavaVersion(Project project, String javaHome) { - String versionScript = 'print(java.lang.System.getProperty("java.version"));' - return runJavaAsScript(project, javaHome, versionScript) - } - - /** Runs the given javascript using jjs from the jdk, and returns the output */ - private static String runJavaAsScript(Project project, String javaHome, String script) { - ByteArrayOutputStream stdout = new ByteArrayOutputStream() - ByteArrayOutputStream stderr = new ByteArrayOutputStream() - if (Os.isFamily(Os.FAMILY_WINDOWS)) { - // gradle/groovy does not properly escape the double quote for windows - script = script.replace('"', '\\"') - } - File jrunscriptPath = new File(javaHome, 'bin/jrunscript') - ExecResult result = project.exec { - executable = jrunscriptPath - args '-e', script - standardOutput = stdout - errorOutput = stderr - ignoreExitValue = true - } - if (result.exitValue != 0) { - project.logger.error("STDOUT:") - stdout.toString('UTF-8').eachLine { line -> project.logger.error(line) } - project.logger.error("STDERR:") - stderr.toString('UTF-8').eachLine { line -> project.logger.error(line) } - result.rethrowFailure() - } - return stdout.toString('UTF-8').trim() + List javaVersions = task.project.property('javaVersions') as List + return javaVersions.find { it.version == version }.javaHome.absolutePath } /** Return the configuration name used for finding transitive deps of the given dependency. */ @@ -550,7 +353,7 @@ class BuildPlugin implements Plugin { */ static void configureConfigurations(Project project) { // we want to test compileOnly deps! 
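        // (compileOnly dependencies are not part of the test compile classpath by default in Gradle,
        // so the test compile configuration is extended from compileOnly here to make them visible to tests)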
- project.configurations.testCompile.extendsFrom(project.configurations.compileOnly) + project.configurations.getByName(JavaPlugin.TEST_COMPILE_CONFIGURATION_NAME).extendsFrom(project.configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME)) // we are not shipping these jars, we act like dumb consumers of these things if (project.path.startsWith(':test:fixtures') || project.path == ':build-tools') { @@ -588,9 +391,9 @@ class BuildPlugin implements Plugin { } } - project.configurations.compile.dependencies.all(disableTransitiveDeps) - project.configurations.testCompile.dependencies.all(disableTransitiveDeps) - project.configurations.compileOnly.dependencies.all(disableTransitiveDeps) + project.configurations.getByName(JavaPlugin.COMPILE_CONFIGURATION_NAME).dependencies.all(disableTransitiveDeps) + project.configurations.getByName(JavaPlugin.TEST_COMPILE_CONFIGURATION_NAME).dependencies.all(disableTransitiveDeps) + project.configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME).dependencies.all(disableTransitiveDeps) project.plugins.withType(ShadowPlugin).whenPluginAdded { Configuration bundle = project.configurations.create('bundle') @@ -604,46 +407,45 @@ class BuildPlugin implements Plugin { if (repository instanceof MavenArtifactRepository) { final MavenArtifactRepository maven = (MavenArtifactRepository) repository assertRepositoryURIUsesHttps(maven, project, maven.getUrl()) - repository.getArtifactUrls().each { uri -> assertRepositoryURIUsesHttps(project, uri) } + repository.getArtifactUrls().each { uri -> assertRepositoryURIUsesHttps(maven, project, uri) } } else if (repository instanceof IvyArtifactRepository) { final IvyArtifactRepository ivy = (IvyArtifactRepository) repository assertRepositoryURIUsesHttps(ivy, project, ivy.getUrl()) } } RepositoryHandler repos = project.repositories - if (System.getProperty("repos.mavenLocal") != null) { + if (System.getProperty('repos.mavenLocal') != null) { // with -Drepos.mavenLocal=true we can force checking the local .m2 repo which is // useful for development ie. 
bwc tests where we install stuff in the local repository // such that we don't have to pass hardcoded files to gradle repos.mavenLocal() } repos.jcenter() - repos.ivy { - name "elasticsearch" - url "https://artifacts.elastic.co/downloads" - patternLayout { - artifact "elasticsearch/[module]-[revision](-[classifier]).[ext]" + repos.ivy { IvyArtifactRepository repo -> + repo.name = 'elasticsearch' + repo.url = 'https://artifacts.elastic.co/downloads' + repo.patternLayout { IvyPatternRepositoryLayout layout -> + layout.artifact 'elasticsearch/[module]-[revision](-[classifier]).[ext]' } // this header is not a credential but we hack the capability to send this header to avoid polluting our download stats - credentials(HttpHeaderCredentials) { - name = "X-Elastic-No-KPI" - value = "1" - } - authentication { - header(HttpHeaderAuthentication) - } + repo.credentials(HttpHeaderCredentials, { HttpHeaderCredentials creds -> + creds.name = 'X-Elastic-No-KPI' + creds.value = '1' + } as Action) + repo.authentication.create('header', HttpHeaderAuthentication) } - repos.maven { - name "elastic" - url "https://artifacts.elastic.co/maven" + repos.maven { MavenArtifactRepository repo -> + repo.name = 'elastic' + repo.url = 'https://artifacts.elastic.co/maven' } String luceneVersion = VersionProperties.lucene if (luceneVersion.contains('-snapshot')) { // extract the revision number from the version with a regex matcher - String revision = (luceneVersion =~ /\w+-snapshot-([a-z0-9]+)/)[0][1] - repos.maven { - name 'lucene-snapshots' - url "https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/${revision}" + List matches = (luceneVersion =~ /\w+-snapshot-([a-z0-9]+)/).getAt(0) as List + String revision = matches.get(1) + repos.maven { MavenArtifactRepository repo -> + repo.name = 'lucene-snapshots' + repo.url = "https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/${revision}" } } } @@ -665,6 +467,7 @@ class BuildPlugin implements Plugin { *
  • Set compile time deps back to compile from runtime (known issue with maven-publish plugin)
  • * */ + @CompileDynamic private static Closure fixupDependencies(Project project) { return { XmlProvider xml -> // first find if we have dependencies at all, and grab the node @@ -725,21 +528,22 @@ class BuildPlugin implements Plugin { } /**Configuration generation of maven poms. */ - public static void configurePomGeneration(Project project) { + static void configurePomGeneration(Project project) { // Only works with `enableFeaturePreview('STABLE_PUBLISHING')` // https://github.com/gradle/gradle/issues/5696#issuecomment-396965185 project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask -> // The GenerateMavenPom task is aggressive about setting the destination, instead of fighting it, // just make a copy. - generatePOMTask.ext.pomFileName = null - doLast { - project.copy { - from generatePOMTask.destination - into "${project.buildDir}/distributions" - rename { - generatePOMTask.ext.pomFileName == null ? - "${project.archivesBaseName}-${project.version}.pom" : - generatePOMTask.ext.pomFileName + ExtraPropertiesExtension ext = generatePOMTask.extensions.getByType(ExtraPropertiesExtension) + ext.set('pomFileName', null) + generatePOMTask.doLast { + project.copy { CopySpec spec -> + spec.from generatePOMTask.destination + spec.into "${project.buildDir}/distributions" + spec.rename { + ext.has('pomFileName') && ext.get('pomFileName') == null ? + "${project.convention.getPlugin(BasePluginConvention).archivesBaseName}-${project.version}.pom" : + ext.get('pomFileName') } } } @@ -749,22 +553,16 @@ class BuildPlugin implements Plugin { assemble.dependsOn(generatePOMTask) } } - project.plugins.withType(MavenPublishPlugin.class).whenPluginAdded { - project.publishing { - publications { - all { MavenPublication publication -> // we only deal with maven - // add exclusions to the pom directly, for each of the transitive deps of this project's deps - publication.pom.withXml(fixupDependencies(project)) - } - } + project.plugins.withType(MavenPublishPlugin).whenPluginAdded { + PublishingExtension publishing = project.extensions.getByType(PublishingExtension) + publishing.publications.all { MavenPublication publication -> // we only deal with maven + // add exclusions to the pom directly, for each of the transitive deps of this project's deps + publication.pom.withXml(fixupDependencies(project)) } project.plugins.withType(ShadowPlugin).whenPluginAdded { - project.publishing { - publications { - nebula(MavenPublication) { - artifacts = [ project.tasks.shadowJar ] - } - } + MavenPublication publication = publishing.publications.maybeCreate('nebula', MavenPublication) + publication.with { + artifacts = [ project.tasks.getByName('shadowJar') ] } } } @@ -776,9 +574,9 @@ class BuildPlugin implements Plugin { static void configureSourceSets(Project project) { project.plugins.withType(ShadowPlugin).whenPluginAdded { ['main', 'test'].each {name -> - SourceSet sourceSet = project.sourceSets.findByName(name) + SourceSet sourceSet = project.extensions.getByType(SourceSetContainer).findByName(name) if (sourceSet != null) { - sourceSet.compileClasspath += project.configurations.bundle + sourceSet.compileClasspath += project.configurations.getByName('bundle') } } } @@ -793,27 +591,39 @@ class BuildPlugin implements Plugin { /** Adds compiler settings to the project */ static void configureCompile(Project project) { - if (project.compilerJavaVersion < JavaVersion.VERSION_1_10) { - project.ext.compactProfile = 'compact3' - } else { - project.ext.compactProfile = 'full' + ExtraPropertiesExtension 
ext = project.extensions.getByType(ExtraPropertiesExtension) + GlobalInfoExtension globalBuildInfo = project.rootProject.extensions.getByType(GlobalInfoExtension) + globalBuildInfo.ready { + if ((ext.get('compilerJavaVersion') as JavaVersion) < JavaVersion.VERSION_1_10) { + ext.set('compactProfile', 'compact3') + } else { + ext.set('compactProfile', 'full') + } } + ext.set('compactProfile', 'full') + + project.extensions.getByType(JavaPluginExtension).sourceCompatibility = ext.get('minimumRuntimeVersion') as JavaVersion + project.extensions.getByType(JavaPluginExtension).targetCompatibility = ext.get('minimumRuntimeVersion') as JavaVersion + project.afterEvaluate { - project.tasks.withType(JavaCompile) { - final JavaVersion targetCompatibilityVersion = JavaVersion.toVersion(it.targetCompatibility) - final compilerJavaHomeFile = new File(project.compilerJavaHome) + File compilerJavaHome = ext.get('compilerJavaHome') as File + + project.tasks.withType(JavaCompile) { JavaCompile compileTask -> + final JavaVersion targetCompatibilityVersion = JavaVersion.toVersion(compileTask.targetCompatibility) // we only fork if the Gradle JDK is not the same as the compiler JDK - if (compilerJavaHomeFile.canonicalPath == Jvm.current().javaHome.canonicalPath) { - options.fork = false + if (compilerJavaHome.canonicalPath == Jvm.current().javaHome.canonicalPath) { + compileTask.options.fork = false } else { - options.fork = true - options.forkOptions.javaHome = compilerJavaHomeFile + compileTask.options.fork = true + compileTask.options.forkOptions.javaHome = compilerJavaHome } if (targetCompatibilityVersion == JavaVersion.VERSION_1_8) { - // compile with compact 3 profile by default - // NOTE: this is just a compile time check: does not replace testing with a compact3 JRE - if (project.compactProfile != 'full') { - options.compilerArgs << '-profile' << project.compactProfile + globalBuildInfo.ready { + // compile with compact 3 profile by default + // NOTE: this is just a compile time check: does not replace testing with a compact3 JRE + if (ext.get('compactProfile') != 'full') { + compileTask.options.compilerArgs << '-profile' << ext.get('compactProfile').toString() + } } } /* @@ -823,29 +633,28 @@ class BuildPlugin implements Plugin { */ // don't even think about passing args with -J-xxx, oracle will ask you to submit a bug report :) // fail on all javac warnings - options.compilerArgs << '-Werror' << '-Xlint:all,-path,-serial,-options,-deprecation,-try' << '-Xdoclint:all' << '-Xdoclint:-missing' + compileTask.options.compilerArgs << '-Werror' << '-Xlint:all,-path,-serial,-options,-deprecation,-try' << '-Xdoclint:all' << '-Xdoclint:-missing' // either disable annotation processor completely (default) or allow to enable them if an annotation processor is explicitly defined - if (options.compilerArgs.contains("-processor") == false) { - options.compilerArgs << '-proc:none' + if (compileTask.options.compilerArgs.contains("-processor") == false) { + compileTask.options.compilerArgs << '-proc:none' } - options.encoding = 'UTF-8' - options.incremental = true + compileTask.options.encoding = 'UTF-8' + compileTask.options.incremental = true // TODO: use native Gradle support for --release when available (cf. 
https://github.com/gradle/gradle/issues/2510) - options.compilerArgs << '--release' << targetCompatibilityVersion.majorVersion + compileTask.options.compilerArgs << '--release' << targetCompatibilityVersion.majorVersion } // also apply release flag to groovy, which is used in build-tools - project.tasks.withType(GroovyCompile) { - final compilerJavaHomeFile = new File(project.compilerJavaHome) + project.tasks.withType(GroovyCompile) { GroovyCompile compileTask -> // we only fork if the Gradle JDK is not the same as the compiler JDK - if (compilerJavaHomeFile.canonicalPath == Jvm.current().javaHome.canonicalPath) { - options.fork = false + if (compilerJavaHome.canonicalPath == Jvm.current().javaHome.canonicalPath) { + compileTask.options.fork = false } else { - options.fork = true - options.forkOptions.javaHome = compilerJavaHomeFile - options.compilerArgs << '--release' << JavaVersion.toVersion(it.targetCompatibility).majorVersion + compileTask.options.fork = true + compileTask.options.forkOptions.javaHome = compilerJavaHome + compileTask.options.compilerArgs << '--release' << JavaVersion.toVersion(compileTask.targetCompatibility).majorVersion } } } @@ -854,11 +663,12 @@ class BuildPlugin implements Plugin { static void configureJavadoc(Project project) { // remove compiled classes from the Javadoc classpath: http://mail.openjdk.java.net/pipermail/javadoc-dev/2018-January/000400.html final List classes = new ArrayList<>() - project.tasks.withType(JavaCompile) { javaCompile -> + project.tasks.withType(JavaCompile) { JavaCompile javaCompile -> classes.add(javaCompile.destinationDir) } - project.tasks.withType(Javadoc) { javadoc -> - javadoc.executable = new File(project.compilerJavaHome, 'bin/javadoc') + project.tasks.withType(Javadoc) { Javadoc javadoc -> + File compilerJavaHome = project.extensions.getByType(ExtraPropertiesExtension).get('compilerJavaHome') as File + javadoc.executable = new File(compilerJavaHome, 'bin/javadoc') javadoc.classpath = javadoc.getClasspath().filter { f -> return classes.contains(f) == false } @@ -866,34 +676,35 @@ class BuildPlugin implements Plugin { * Generate docs using html5 to suppress a warning from `javadoc` * that the default will change to html5 in the future. */ - javadoc.options.addBooleanOption('html5', true) + (javadoc.options as CoreJavadocOptions).addBooleanOption('html5', true) } configureJavadocJar(project) } /** Adds a javadocJar task to generate a jar containing javadocs. */ static void configureJavadocJar(Project project) { - Jar javadocJarTask = project.task('javadocJar', type: Jar) + Jar javadocJarTask = project.tasks.create('javadocJar', Jar) javadocJarTask.classifier = 'javadoc' javadocJarTask.group = 'build' javadocJarTask.description = 'Assembles a jar containing javadocs.' javadocJarTask.from(project.tasks.getByName(JavaPlugin.JAVADOC_TASK_NAME)) - project.assemble.dependsOn(javadocJarTask) + project.tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn(javadocJarTask) } static void configureSourcesJar(Project project) { - Jar sourcesJarTask = project.task('sourcesJar', type: Jar) + Jar sourcesJarTask = project.tasks.create('sourcesJar', Jar) sourcesJarTask.classifier = 'sources' sourcesJarTask.group = 'build' sourcesJarTask.description = 'Assembles a jar containing source files.' 
- sourcesJarTask.from(project.sourceSets.main.allSource) - project.assemble.dependsOn(sourcesJarTask) + sourcesJarTask.from(project.extensions.getByType(SourceSetContainer).getByName(SourceSet.MAIN_SOURCE_SET_NAME).allSource) + project.tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn(sourcesJarTask) } /** Adds additional manifest info to jars */ static void configureJars(Project project) { - project.ext.licenseFile = null - project.ext.noticeFile = null + ExtraPropertiesExtension ext = project.extensions.getByType(ExtraPropertiesExtension) + ext.set('licenseFile', null) + ext.set('noticeFile', null) project.tasks.withType(Jar) { Jar jarTask -> // we put all our distributable files under distributions jarTask.destinationDir = new File(project.buildDir, 'distributions') @@ -901,14 +712,15 @@ class BuildPlugin implements Plugin { jarTask.doFirst { // this doFirst is added before the info plugin, therefore it will run // after the doFirst added by the info plugin, and we can override attributes + JavaVersion compilerJavaVersion = ext.get('compilerJavaVersion') as JavaVersion jarTask.manifest.attributes( 'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch, 'X-Compile-Lucene-Version': VersionProperties.lucene, 'X-Compile-Elasticsearch-Snapshot': VersionProperties.isElasticsearchSnapshot(), 'Build-Date': ZonedDateTime.now(ZoneOffset.UTC), - 'Build-Java-Version': project.compilerJavaVersion) + 'Build-Java-Version': compilerJavaVersion) if (jarTask.manifest.attributes.containsKey('Change') == false) { - logger.warn('Building without git revision id.') + jarTask.logger.warn('Building without git revision id.') jarTask.manifest.attributes('Change': 'Unknown') } else { /* @@ -923,19 +735,24 @@ class BuildPlugin implements Plugin { } } } + // add license/notice files project.afterEvaluate { - if (project.licenseFile == null || project.noticeFile == null) { + if (ext.has('licenseFile') == false || ext.get('licenseFile') == null || ext.has('noticeFile') == false || ext.get('noticeFile') == null) { throw new GradleException("Must specify license and notice file for project ${project.path}") } - jarTask.metaInf { - from(project.licenseFile.parent) { - include project.licenseFile.name - rename { 'LICENSE.txt' } + + File licenseFile = ext.get('licenseFile') as File + File noticeFile = ext.get('noticeFile') as File + + jarTask.metaInf { CopySpec spec -> + spec.from(licenseFile.parent) { CopySpec from -> + from.include licenseFile.name + from.rename { 'LICENSE.txt' } } - from(project.noticeFile.parent) { - include project.noticeFile.name - rename { 'NOTICE.txt' } + spec.from(noticeFile.parent) { CopySpec from -> + from.include noticeFile.name + from.rename { 'NOTICE.txt' } } } } @@ -946,35 +763,35 @@ class BuildPlugin implements Plugin { * normal jar with the shadow jar so we no longer want to run * the jar task. */ - project.tasks.jar.enabled = false - project.tasks.shadowJar { + project.tasks.getByName(JavaPlugin.JAR_TASK_NAME).enabled = false + project.tasks.getByName('shadowJar').configure { ShadowJar shadowJar -> /* * Replace the default "shadow" classifier with null * which will leave the classifier off of the file name. */ - classifier = null + shadowJar.classifier = null /* * Not all cases need service files merged but it is * better to be safe */ - mergeServiceFiles() + shadowJar.mergeServiceFiles() /* * Bundle dependencies of the "bundled" configuration. 
*/ - configurations = [project.configurations.bundle] + shadowJar.configurations = [project.configurations.getByName('bundle')] } // Make sure we assemble the shadow jar - project.tasks.assemble.dependsOn project.tasks.shadowJar - project.artifacts { - apiElements project.tasks.shadowJar - } + project.tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn project.tasks.getByName('shadowJar') + project.artifacts.add('apiElements', project.tasks.getByName('shadowJar')) } } static void configureTestTasks(Project project) { + ExtraPropertiesExtension ext = project.extensions.getByType(ExtraPropertiesExtension) + + // Default test task should run only unit tests - project.tasks.withType(Test).matching { it.name == 'test' }.all { - include '**/*Tests.class' + project.tasks.withType(Test).matching { Test task -> task.name == 'test' }.all { Test task -> + task.include '**/*Tests.class' } // none of this stuff is applicable to the `:buildSrc` project tests @@ -984,150 +801,130 @@ class BuildPlugin implements Plugin { project.tasks.withType(Test) { Test test -> File testOutputDir = new File(test.reports.junitXml.getDestination(), "output") - doFirst { + ErrorReportingTestListener listener = new ErrorReportingTestListener(test.testLogging, testOutputDir) + test.extensions.add(ErrorReportingTestListener, 'errorReportingTestListener', listener) + test.addTestOutputListener(listener) + test.addTestListener(listener) + + /* + * We use lazy-evaluated strings in order to configure system properties whose value will not be known until + * execution time (e.g. cluster port numbers). Adding these via the normal DSL doesn't work as these get treated + * as task inputs and therefore Gradle attempts to snapshot them before/after task execution. This fails due + * to the GStrings containing references to non-serializable objects. + * + * We bypass this by instead passing these system properties via a CommandLineArgumentProvider. This has the added + * side-effect that these properties are NOT treated as inputs, therefore they don't influence things like the + * build cache key or up to date checking.
+ */ + SystemPropertyCommandLineArgumentProvider nonInputProperties = new SystemPropertyCommandLineArgumentProvider() + + test.doFirst { project.mkdir(testOutputDir) project.mkdir(heapdumpDir) project.mkdir(test.workingDir) + + if (project.property('inFipsJvm')) { + nonInputProperties.systemProperty('runtime.java', "${-> (ext.get('runtimeJavaVersion') as JavaVersion).getMajorVersion()}FIPS") + } else { + nonInputProperties.systemProperty('runtime.java', "${-> (ext.get('runtimeJavaVersion') as JavaVersion).getMajorVersion()}") + } + + if ((ext.get('runtimeJavaVersion') as JavaVersion) >= JavaVersion.VERSION_1_9) { + test.jvmArgs '--illegal-access=warn' + } } - def listener = new ErrorReportingTestListener(test.testLogging, testOutputDir) - test.extensions.add(ErrorReportingTestListener, 'errorReportingTestListener', listener) - addTestOutputListener(listener) - addTestListener(listener) + test.jvmArgumentProviders.add(nonInputProperties) + test.extensions.getByType(ExtraPropertiesExtension).set('nonInputProperties', nonInputProperties) - executable = "${project.runtimeJavaHome}/bin/java" - workingDir = project.file("${project.buildDir}/testrun/${test.name}") - maxParallelForks = project.rootProject.ext.defaultParallel + test.executable = "${ext.get('runtimeJavaHome')}/bin/java" + test.workingDir = project.file("${project.buildDir}/testrun/${test.name}") + test.maxParallelForks = project.rootProject.extensions.getByType(ExtraPropertiesExtension).get('defaultParallel') as Integer - exclude '**/*$*.class' + test.exclude '**/*$*.class' - jvmArgs "-Xmx${System.getProperty('tests.heap.size', '512m')}", + test.jvmArgs "-Xmx${System.getProperty('tests.heap.size', '512m')}", "-Xms${System.getProperty('tests.heap.size', '512m')}", '-XX:+HeapDumpOnOutOfMemoryError', "-XX:HeapDumpPath=$heapdumpDir" - if (project.runtimeJavaVersion >= JavaVersion.VERSION_1_9) { - jvmArgs '--illegal-access=warn' - } if (System.getProperty('tests.jvm.argline')) { - jvmArgs System.getProperty('tests.jvm.argline').split(" ") + test.jvmArgs System.getProperty('tests.jvm.argline').split(" ") } if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) { - jvmArgs '-ea', '-esa' + test.jvmArgs '-ea', '-esa' } // we use './temp' since this is per JVM and tests are forbidden from writing to CWD - systemProperties 'gradle.dist.lib': new File(project.class.location.toURI()).parent, + test.systemProperties 'gradle.dist.lib': new File(project.class.location.toURI()).parent, 'gradle.worker.jar': "${project.gradle.getGradleUserHomeDir()}/caches/${project.gradle.gradleVersion}/workerMain/gradle-worker.jar", 'gradle.user.home': project.gradle.getGradleUserHomeDir(), 'java.io.tmpdir': './temp', 'java.awt.headless': 'true', 'tests.gradle': 'true', 'tests.artifact': project.name, - 'tests.task': path, + 'tests.task': test.path, 'tests.security.manager': 'true', - 'tests.seed': project.testSeed, - 'jna.nosys': 'true', - 'compiler.java': project.ext.compilerJavaVersion.getMajorVersion() + 'tests.seed': project.property('testSeed'), + 'jna.nosys': 'true' + + nonInputProperties.systemProperty('compiler.java', "${-> (ext.get('compilerJavaVersion') as JavaVersion).getMajorVersion()}") - if (project.ext.inFipsJvm) { - systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() + "FIPS" - } else { - systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() - } // TODO: remove setting logging level via system property - systemProperty 'tests.logger.level', 'WARN' + test.systemProperty 
'tests.logger.level', 'WARN' System.getProperties().each { key, value -> - if ((key.startsWith('tests.') || key.startsWith('es.'))) { - systemProperty key, value + if ((key.toString().startsWith('tests.') || key.toString().startsWith('es.'))) { + test.systemProperty key.toString(), value } } // TODO: remove this once ctx isn't added to update script params in 7.0 - systemProperty 'es.scripting.update.ctx_in_params', 'false' + test.systemProperty 'es.scripting.update.ctx_in_params', 'false' - testLogging { - showExceptions = true - showCauses = true - exceptionFormat = 'full' + test.testLogging { TestLoggingContainer logging -> + logging.showExceptions = true + logging.showCauses = true + logging.exceptionFormat = 'full' } project.plugins.withType(ShadowPlugin).whenPluginAdded { // Test against a shadow jar if we made one - classpath -= project.tasks.compileJava.outputs.files - classpath += project.tasks.shadowJar.outputs.files + test.classpath -= project.tasks.getByName('compileJava').outputs.files + test.classpath += project.tasks.getByName('shadowJar').outputs.files - dependsOn project.tasks.shadowJar + test.dependsOn project.tasks.getByName('shadowJar') } } } } - private static int numberOfPhysicalCores(Project project) { - if (project.file("/proc/cpuinfo").exists()) { - // Count physical cores on any Linux distro ( don't count hyper-threading ) - Map socketToCore = [:] - String currentID = "" - project.file("/proc/cpuinfo").readLines().forEach({ line -> - if (line.contains(":")) { - List parts = line.split(":", 2).collect({it.trim()}) - String name = parts[0], value = parts[1] - // the ID of the CPU socket - if (name == "physical id") { - currentID = value - } - // number of cores not including hyper-threading - if (name == "cpu cores") { - assert currentID.isEmpty() == false - socketToCore[currentID] = Integer.valueOf(value) - currentID = "" - } - } - }) - return socketToCore.values().sum() - } else if ('Mac OS X'.equals(System.getProperty('os.name'))) { - // Ask macOS to count physical CPUs for us - ByteArrayOutputStream stdout = new ByteArrayOutputStream() - project.exec { - executable 'sysctl' - args '-n', 'hw.physicalcpu' - standardOutput = stdout - } - return Integer.parseInt(stdout.toString('UTF-8').trim()) - } else { - // guess that it is half the number of processors (which is wrong on systems that do not have simultaneous multi-threading) - // TODO: implement this on Windows - return Runtime.getRuntime().availableProcessors() / 2 - } - } - private static configurePrecommit(Project project) { Task precommit = PrecommitTasks.create(project, true) - project.check.dependsOn(precommit) - project.test.mustRunAfter(precommit) + project.tasks.getByName(LifecycleBasePlugin.CHECK_TASK_NAME).dependsOn(precommit) + project.tasks.getByName(JavaPlugin.TEST_TASK_NAME).mustRunAfter(precommit) // only require dependency licenses for non-elasticsearch deps - project.dependencyLicenses.dependencies = project.configurations.runtime.fileCollection { - it.group.startsWith('org.elasticsearch') == false - } - project.configurations.compileOnly + (project.tasks.getByName('dependencyLicenses') as DependencyLicensesTask).dependencies = project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME).fileCollection { Dependency dependency -> + dependency.group.startsWith('org.elasticsearch') == false + } - project.configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME) project.plugins.withType(ShadowPlugin).whenPluginAdded { - project.dependencyLicenses.dependencies += 
project.configurations.bundle.fileCollection { - it.group.startsWith('org.elasticsearch') == false + (project.tasks.getByName('dependencyLicenses') as DependencyLicensesTask).dependencies += project.configurations.getByName('bundle').fileCollection { Dependency dependency -> + dependency.group.startsWith('org.elasticsearch') == false } } } private static configureDependenciesInfo(Project project) { - Task deps = project.tasks.create("dependenciesInfo", DependenciesInfoTask.class) - deps.runtimeConfiguration = project.configurations.runtime + DependenciesInfoTask deps = project.tasks.create("dependenciesInfo", DependenciesInfoTask) + deps.runtimeConfiguration = project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME) project.plugins.withType(ShadowPlugin).whenPluginAdded { deps.runtimeConfiguration = project.configurations.create('infoDeps') - deps.runtimeConfiguration.extendsFrom(project.configurations.runtime, project.configurations.bundle) + deps.runtimeConfiguration.extendsFrom(project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME), project.configurations.getByName('bundle')) } - deps.compileOnlyConfiguration = project.configurations.compileOnly + deps.compileOnlyConfiguration = project.configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME) project.afterEvaluate { - deps.mappings = project.dependencyLicenses.mappings + deps.mappings = (project.tasks.getByName('dependencyLicenses') as DependencyLicensesTask).mappings } } @@ -1139,11 +936,12 @@ class BuildPlugin implements Plugin { * the reproduction line from one run be useful on another run. */ static String setupSeed(Project project) { - if (project.rootProject.ext.has('testSeed')) { + ExtraPropertiesExtension ext = project.rootProject.extensions.getByType(ExtraPropertiesExtension) + if (ext.has('testSeed')) { /* Skip this if we've already pinned the testSeed. It is important * that this checks the rootProject so that we know we've only ever * initialized one time. 
*/ - return project.rootProject.ext.testSeed + return ext.get('testSeed') } String testSeed = System.getProperty('tests.seed') @@ -1152,7 +950,7 @@ class BuildPlugin implements Plugin { testSeed = Long.toUnsignedString(seed, 16).toUpperCase(Locale.ROOT) } - project.rootProject.ext.testSeed = testSeed + ext.set('testSeed', testSeed) return testSeed } @@ -1184,4 +982,19 @@ class BuildPlugin implements Plugin { }) } } + + private static class SystemPropertyCommandLineArgumentProvider implements CommandLineArgumentProvider { + private final Map systemProperties = [:] + + void systemProperty(String key, Object value) { + systemProperties.put(key, value) + } + + @Override + Iterable asArguments() { + return systemProperties.collect { key, value -> + "-D${key}=${value.toString()}".toString() + } + } + } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index d5bdd211702..e04d0966c41 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -28,6 +28,7 @@ import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.elasticsearch.gradle.testclusters.TestClustersPlugin import org.gradle.api.InvalidUserDataException +import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.Task import org.gradle.api.publish.maven.MavenPublication @@ -43,13 +44,13 @@ import java.util.regex.Pattern /** * Encapsulates build configuration for an Elasticsearch plugin. */ -class PluginBuildPlugin extends BuildPlugin { +class PluginBuildPlugin implements Plugin { public static final String PLUGIN_EXTENSION_NAME = 'esplugin' @Override void apply(Project project) { - super.apply(project) + project.pluginManager.apply(BuildPlugin) PluginPropertiesExtension extension = project.extensions.create(PLUGIN_EXTENSION_NAME, PluginPropertiesExtension, project) configureDependencies(project) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 25218202bfc..f656f177ce6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -116,15 +116,13 @@ class PrecommitTasks { } private static Task configureThirdPartyAudit(Project project) { - ThirdPartyAuditTask thirdPartyAuditTask = project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class) ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources') - thirdPartyAuditTask.configure { - dependsOn(buildResources) - signatureFile = buildResources.copy("forbidden/third-party-audit.txt") - javaHome = project.runtimeJavaHome - targetCompatibility = project.runtimeJavaVersion + return project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class) { task -> + task.dependsOn(buildResources) + task.signatureFile = buildResources.copy("forbidden/third-party-audit.txt") + task.javaHome = project.runtimeJavaHome + task.targetCompatibility.set(project.provider({ project.runtimeJavaVersion })) } - return thirdPartyAuditTask } private static Task configureForbiddenApisCli(Project project) { @@ -132,16 +130,16 @@ class PrecommitTasks { ExportElasticsearchBuildResourcesTask 
buildResources = project.tasks.getByName('buildResources') project.tasks.withType(CheckForbiddenApis) { dependsOn(buildResources) - targetCompatibility = project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 ? - project.runtimeJavaVersion.getMajorVersion() : project.runtimeJavaVersion - if (project.runtimeJavaVersion > JavaVersion.VERSION_11) { - doLast { + doFirst { + // we need to defer this configuration since we don't know the runtime java version until execution time + targetCompatibility = project.runtimeJavaVersion.getMajorVersion() + if (project.runtimeJavaVersion > JavaVersion.VERSION_11) { project.logger.info( "Forbidden APIs does not support java version past 11. Will use the signatures from 11 for ", project.runtimeJavaVersion ) + targetCompatibility = JavaVersion.VERSION_11.getMajorVersion() } - targetCompatibility = JavaVersion.VERSION_11.getMajorVersion() } bundledSignatures = [ "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out" diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index a44869cf4c8..254d2502875 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -317,12 +317,6 @@ class ClusterFormationTasks { // its run after plugins have been installed, as the extra config files may belong to plugins setup = configureExtraConfigFilesTask(taskName(prefix, node, 'extraConfig'), project, setup, node) - // If the node runs in a FIPS 140-2 JVM, the BCFKS default keystore will be password protected - if (project.inFipsJvm){ - node.config.systemProperties.put('javax.net.ssl.trustStorePassword', 'password') - node.config.systemProperties.put('javax.net.ssl.keyStorePassword', 'password') - } - // extra setup commands for (Map.Entry command : node.config.setupCommands.entrySet()) { // the first argument is the actual script name, relative to home @@ -430,16 +424,17 @@ class ClusterFormationTasks { if (node.nodeVersion.major >= 7) { esConfig['indices.breaker.total.use_real_memory'] = false } - for (Map.Entry setting : node.config.settings) { - if (setting.value == null) { - esConfig.remove(setting.key) - } else { - esConfig.put(setting.key, setting.value) - } - } Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) writeConfig.doFirst { + for (Map.Entry setting : node.config.settings) { + if (setting.value == null) { + esConfig.remove(setting.key) + } else { + esConfig.put(setting.key, setting.value) + } + } + esConfig = configFilter.call(esConfig) File configFile = new File(node.pathConf, 'elasticsearch.yml') logger.info("Configuring ${configFile}") @@ -760,6 +755,12 @@ class ClusterFormationTasks { } start.doLast(elasticsearchRunner) start.doFirst { + // If the node runs in a FIPS 140-2 JVM, the BCFKS default keystore will be password protected + if (project.inFipsJvm){ + node.config.systemProperties.put('javax.net.ssl.trustStorePassword', 'password') + node.config.systemProperties.put('javax.net.ssl.keyStorePassword', 'password') + } + // Configure ES JAVA OPTS - adds system properties, assertion flags, remote debug etc List esJavaOpts = [node.env.get('ES_JAVA_OPTS', '')] String collectedSystemProperties = node.config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") diff --git 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index ef784b6f901..0ded69756eb 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -86,50 +86,23 @@ class RestIntegTestTask extends DefaultTask { runner.include('**/*IT.class') runner.systemProperty('tests.rest.load_packaged', 'false') - /* - * We use lazy-evaluated strings in order to configure system properties whose value will not be known until - * execution time (e.g. cluster port numbers). Adding these via the normal DSL doesn't work as these get treated - * as task inputs and therefore Gradle attempts to snapshot them before/after task execution. This fails due - * to the GStrings containing references to non-serializable objects. - * - * We bypass this by instead passing this system properties vi a CommandLineArgumentProvider. This has the added - * side-effect that these properties are NOT treated as inputs, therefore they don't influence things like the - * build cache key or up to date checking. - */ - def nonInputProperties = new CommandLineArgumentProvider() { - private final Map systemProperties = [:] - - void systemProperty(String key, Object value) { - systemProperties.put(key, value) - } - - @Override - Iterable asArguments() { - return systemProperties.collect { key, value -> - "-D${key}=${value.toString()}".toString() - } - } - } - runner.jvmArgumentProviders.add(nonInputProperties) - runner.ext.nonInputProperties = nonInputProperties - if (System.getProperty("tests.rest.cluster") == null) { if (System.getProperty("tests.cluster") != null) { throw new IllegalArgumentException("tests.rest.cluster and tests.cluster must both be null or non-null") } if (usesTestclusters == true) { ElasticsearchCluster cluster = project.testClusters."${name}" - nonInputProperties.systemProperty('tests.rest.cluster', "${-> cluster.allHttpSocketURI.join(",") }") - nonInputProperties.systemProperty('tests.cluster', "${-> cluster.transportPortURI }") + runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> cluster.allHttpSocketURI.join(",") }") + runner.nonInputProperties.systemProperty('tests.cluster', "${-> cluster.transportPortURI }") } else { // we pass all nodes to the rest cluster to allow the clients to round-robin between them // this is more realistic than just talking to a single node - nonInputProperties.systemProperty('tests.rest.cluster', "${-> nodes.collect { it.httpUri() }.join(",")}") - nonInputProperties.systemProperty('tests.config.dir', "${-> nodes[0].pathConf}") + runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> nodes.collect { it.httpUri() }.join(",")}") + runner.nonInputProperties.systemProperty('tests.config.dir', "${-> nodes[0].pathConf}") // TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin // that sets up the test cluster and passes this transport uri instead of http uri. 
Until then, we pass // both as separate sysprops - nonInputProperties.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}") + runner.nonInputProperties.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}") // dump errors and warnings from cluster log on failure TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy index fbd9fe01b9e..c9a26eb74b5 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy @@ -27,11 +27,14 @@ import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.precommit.PrecommitTasks import org.gradle.api.InvalidUserDataException +import org.gradle.api.JavaVersion import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.artifacts.Configuration +import org.gradle.api.plugins.ExtraPropertiesExtension import org.gradle.api.plugins.JavaBasePlugin import org.gradle.api.plugins.JavaPlugin +import org.gradle.api.plugins.JavaPluginExtension import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.SourceSetContainer import org.gradle.api.tasks.compile.JavaCompile @@ -57,11 +60,14 @@ class StandaloneRestTestPlugin implements Plugin { project.pluginManager.apply(JavaBasePlugin) project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask) - BuildPlugin.globalBuildInfo(project) BuildPlugin.configureRepositories(project) BuildPlugin.configureTestTasks(project) BuildPlugin.configureInputNormalization(project) + ExtraPropertiesExtension ext = project.extensions.getByType(ExtraPropertiesExtension) + project.extensions.getByType(JavaPluginExtension).sourceCompatibility = ext.get('minimumRuntimeVersion') as JavaVersion + project.extensions.getByType(JavaPluginExtension).targetCompatibility = ext.get('minimumRuntimeVersion') as JavaVersion + // only setup tests to build SourceSetContainer sourceSets = project.extensions.getByType(SourceSetContainer) SourceSet testSourceSet = sourceSets.create('test') diff --git a/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/JdkJarHellCheck.java b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java similarity index 100% rename from buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/JdkJarHellCheck.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java diff --git a/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LazyFileOutputStream.java b/buildSrc/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java similarity index 100% rename from buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LazyFileOutputStream.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java diff --git a/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LoggedExec.java b/buildSrc/src/main/java/org/elasticsearch/gradle/LoggedExec.java similarity index 100% rename from buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LoggedExec.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/LoggedExec.java diff --git a/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/Version.java b/buildSrc/src/main/java/org/elasticsearch/gradle/Version.java 
similarity index 100% rename from buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/Version.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/Version.java diff --git a/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/VersionProperties.java b/buildSrc/src/main/java/org/elasticsearch/gradle/VersionProperties.java similarity index 100% rename from buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/VersionProperties.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/VersionProperties.java diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/info/GenerateGlobalBuildInfoTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GenerateGlobalBuildInfoTask.java new file mode 100644 index 00000000000..8537775ee12 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GenerateGlobalBuildInfoTask.java @@ -0,0 +1,276 @@ +package org.elasticsearch.gradle.info; + +import org.elasticsearch.gradle.OS; +import org.gradle.api.DefaultTask; +import org.gradle.api.GradleException; +import org.gradle.api.JavaVersion; +import org.gradle.api.file.RegularFileProperty; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.tasks.CacheableTask; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputDirectory; +import org.gradle.api.tasks.Nested; +import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; +import org.gradle.api.tasks.TaskAction; +import org.gradle.internal.jvm.Jvm; +import org.gradle.process.ExecResult; + +import javax.inject.Inject; +import java.io.BufferedWriter; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.io.Writer; +import java.nio.file.Files; +import java.util.Arrays; +import java.util.List; + +import static java.nio.charset.StandardCharsets.UTF_8; + +@CacheableTask +public class GenerateGlobalBuildInfoTask extends DefaultTask { + private JavaVersion minimumCompilerVersion; + private JavaVersion minimumRuntimeVersion; + private File compilerJavaHome; + private File runtimeJavaHome; + private List javaVersions; + private final RegularFileProperty outputFile; + private final RegularFileProperty compilerVersionFile; + private final RegularFileProperty runtimeVersionFile; + private final RegularFileProperty fipsJvmFile; + + @Inject + public GenerateGlobalBuildInfoTask(ObjectFactory objectFactory) { + this.outputFile = objectFactory.fileProperty(); + this.compilerVersionFile = objectFactory.fileProperty(); + this.runtimeVersionFile = objectFactory.fileProperty(); + this.fipsJvmFile = objectFactory.fileProperty(); + } + + @Input + public JavaVersion getMinimumCompilerVersion() { + return minimumCompilerVersion; + } + + public void setMinimumCompilerVersion(JavaVersion minimumCompilerVersion) { + this.minimumCompilerVersion = minimumCompilerVersion; + } + + @Input + public JavaVersion getMinimumRuntimeVersion() { + return minimumRuntimeVersion; + } + + public void setMinimumRuntimeVersion(JavaVersion minimumRuntimeVersion) { + this.minimumRuntimeVersion = minimumRuntimeVersion; + } + + @InputDirectory + @PathSensitive(PathSensitivity.RELATIVE) + public File getCompilerJavaHome() { + return compilerJavaHome; + } + + public void setCompilerJavaHome(File compilerJavaHome) { + this.compilerJavaHome = compilerJavaHome; + } + + @InputDirectory + @PathSensitive(PathSensitivity.RELATIVE) + public File 
getRuntimeJavaHome() { + return runtimeJavaHome; + } + + public void setRuntimeJavaHome(File runtimeJavaHome) { + this.runtimeJavaHome = runtimeJavaHome; + } + + @Nested + public List getJavaVersions() { + return javaVersions; + } + + public void setJavaVersions(List javaVersions) { + this.javaVersions = javaVersions; + } + + @OutputFile + public RegularFileProperty getOutputFile() { + return outputFile; + } + + @OutputFile + public RegularFileProperty getCompilerVersionFile() { + return compilerVersionFile; + } + + @OutputFile + public RegularFileProperty getRuntimeVersionFile() { + return runtimeVersionFile; + } + + @OutputFile + public RegularFileProperty getFipsJvmFile() { + return fipsJvmFile; + } + + @TaskAction + public void generate() { + String javaVendor = System.getProperty("java.vendor"); + String gradleJavaVersion = System.getProperty("java.version"); + String gradleJavaVersionDetails = javaVendor + " " + gradleJavaVersion + " [" + System.getProperty("java.vm.name") + + " " + System.getProperty("java.vm.version") + "]"; + + String compilerJavaVersionDetails = gradleJavaVersionDetails; + JavaVersion compilerJavaVersionEnum = JavaVersion.current(); + String runtimeJavaVersionDetails = gradleJavaVersionDetails; + JavaVersion runtimeJavaVersionEnum = JavaVersion.current(); + File gradleJavaHome = Jvm.current().getJavaHome(); + boolean inFipsJvm = false; + + try { + if (Files.isSameFile(compilerJavaHome.toPath(), gradleJavaHome.toPath()) == false) { + if (compilerJavaHome.exists()) { + compilerJavaVersionDetails = findJavaVersionDetails(compilerJavaHome); + compilerJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(compilerJavaHome)); + } else { + throw new RuntimeException("Compiler Java home path of '" + compilerJavaHome + "' does not exist"); + } + } + + if (Files.isSameFile(runtimeJavaHome.toPath(), gradleJavaHome.toPath()) == false) { + if (runtimeJavaHome.exists()) { + runtimeJavaVersionDetails = findJavaVersionDetails(runtimeJavaHome); + runtimeJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(runtimeJavaHome)); + + // We don't expect Gradle to be running in a FIPS JVM + String inFipsJvmScript = "print(java.security.Security.getProviders()[0].name.toLowerCase().contains(\"fips\"));"; + inFipsJvm = Boolean.parseBoolean(runJavaAsScript(runtimeJavaHome, inFipsJvmScript)); + } else { + throw new RuntimeException("Runtime Java home path of '" + runtimeJavaHome + "' does not exist"); + } + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + try (BufferedWriter writer = new BufferedWriter(new FileWriter(outputFile.getAsFile().get()))) { + writer.write(" Gradle Version : " + getProject().getGradle().getGradleVersion() + "\n"); + writer.write(" OS Info : " + System.getProperty("os.name") + " " + System.getProperty("os.version") + + " (" + System.getProperty("os.arch") + ")\n"); + if (gradleJavaVersionDetails.equals(compilerJavaVersionDetails) == false + || gradleJavaVersionDetails.equals(runtimeJavaVersionDetails) == false) { + writer.write(" Compiler JDK Version : " + compilerJavaVersionEnum + " (" + compilerJavaVersionDetails + ")\n"); + writer.write(" Compiler java.home : " + compilerJavaHome + "\n"); + writer.write(" Runtime JDK Version : " + runtimeJavaVersionEnum + " (" + runtimeJavaVersionDetails + ")\n"); + writer.write(" Runtime java.home : " + runtimeJavaHome + "\n"); + writer.write(" Gradle JDK Version : " + JavaVersion.toVersion(gradleJavaVersion) + + " (" + gradleJavaVersionDetails + ")\n"); + writer.write("
Gradle java.home : " + gradleJavaHome); + } else { + writer.write(" JDK Version : " + JavaVersion.toVersion(gradleJavaVersion) + + " (" + gradleJavaVersionDetails + ")\n"); + writer.write(" JAVA_HOME : " + gradleJavaHome); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + // enforce Java version + if (compilerJavaVersionEnum.compareTo(minimumCompilerVersion) < 0) { + String message = "The compiler java.home must be set to a JDK installation directory for Java " + minimumCompilerVersion + + " but is [" + compilerJavaHome + "] corresponding to [" + compilerJavaVersionEnum + "]"; + throw new GradleException(message); + } + + if (runtimeJavaVersionEnum.compareTo(minimumRuntimeVersion) < 0) { + String message = "The runtime java.home must be set to a JDK installation directory for Java " + minimumRuntimeVersion + + " but is [" + runtimeJavaHome + "] corresponding to [" + runtimeJavaVersionEnum + "]"; + throw new GradleException(message); + } + + for (JavaHome javaVersion : javaVersions) { + File javaHome = javaVersion.getJavaHome(); + if (javaHome == null) { + continue; + } + JavaVersion javaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(javaHome)); + JavaVersion expectedJavaVersionEnum; + int version = javaVersion.getVersion(); + if (version < 9) { + expectedJavaVersionEnum = JavaVersion.toVersion("1." + version); + } else { + expectedJavaVersionEnum = JavaVersion.toVersion(Integer.toString(version)); + } + if (javaVersionEnum != expectedJavaVersionEnum) { + String message = "The environment variable JAVA" + version + "_HOME must be set to a JDK installation directory for Java " + + expectedJavaVersionEnum + " but is [" + javaHome + "] corresponding to [" + javaVersionEnum + "]"; + throw new GradleException(message); + } + } + + writeToFile(compilerVersionFile.getAsFile().get(), compilerJavaVersionEnum.name()); + writeToFile(runtimeVersionFile.getAsFile().get(), runtimeJavaVersionEnum.name()); + writeToFile(fipsJvmFile.getAsFile().get(), Boolean.toString(inFipsJvm)); + } + + private void writeToFile(File file, String content) { + try (Writer writer = new FileWriter(file)) { + writer.write(content); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + /** + * Finds printable java version of the given JAVA_HOME + */ + private String findJavaVersionDetails(File javaHome) { + String versionInfoScript = "print(" + + "java.lang.System.getProperty(\"java.vendor\") + \" \" + java.lang.System.getProperty(\"java.version\") + " + + "\" [\" + java.lang.System.getProperty(\"java.vm.name\") + \" \" + java.lang.System.getProperty(\"java.vm.version\") + \"]\");"; + return runJavaAsScript(javaHome, versionInfoScript).trim(); + } + + /** + * Finds the parsable java specification version + */ + private String findJavaSpecificationVersion(File javaHome) { + String versionScript = "print(java.lang.System.getProperty(\"java.specification.version\"));"; + return runJavaAsScript(javaHome, versionScript); + } + + /** + * Runs the given javascript using jjs from the jdk, and returns the output + */ + private String runJavaAsScript(File javaHome, String script) { + ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + ByteArrayOutputStream stderr = new ByteArrayOutputStream(); + if (OS.current() == OS.WINDOWS) { + // gradle/groovy does not properly escape the double quote for windows + script = script.replace("\"", "\\\""); + } + File jrunscriptPath = new File(javaHome, "bin/jrunscript"); + String finalScript = script; + ExecResult result = 
getProject().exec(spec -> { + spec.setExecutable(jrunscriptPath); + spec.args("-e", finalScript); + spec.setStandardOutput(stdout); + spec.setErrorOutput(stderr); + spec.setIgnoreExitValue(true); + }); + + if (result.getExitValue() != 0) { + getLogger().error("STDOUT:"); + Arrays.stream(stdout.toString(UTF_8).split(System.getProperty("line.separator"))).forEach(getLogger()::error); + getLogger().error("STDERR:"); + Arrays.stream(stderr.toString(UTF_8).split(System.getProperty("line.separator"))).forEach(getLogger()::error); + result.rethrowFailure(); + } + return stdout.toString(UTF_8).trim(); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java new file mode 100644 index 00000000000..f0f34e84261 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java @@ -0,0 +1,198 @@ +package org.elasticsearch.gradle.info; + +import org.elasticsearch.gradle.OS; +import org.gradle.api.GradleException; +import org.gradle.api.JavaVersion; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.plugins.ExtraPropertiesExtension; +import org.gradle.internal.jvm.Jvm; + +import java.io.BufferedReader; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class GlobalBuildInfoPlugin implements Plugin { + private static final String GLOBAL_INFO_EXTENSION_NAME = "globalInfo"; + private static Integer _defaultParallel = null; + + @Override + public void apply(Project project) { + if (project != project.getRootProject()) { + throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project."); + } + + GlobalInfoExtension extension = project.getExtensions().create(GLOBAL_INFO_EXTENSION_NAME, GlobalInfoExtension.class); + + JavaVersion minimumCompilerVersion = JavaVersion.toVersion(getResourceContents("/minimumCompilerVersion")); + JavaVersion minimumRuntimeVersion = JavaVersion.toVersion(getResourceContents("/minimumRuntimeVersion")); + + File compilerJavaHome = findCompilerJavaHome(); + File runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome); + + final List javaVersions = new ArrayList<>(); + for (int version = 8; version <= Integer.parseInt(minimumCompilerVersion.getMajorVersion()); version++) { + if (System.getenv(getJavaHomeEnvVarName(Integer.toString(version))) != null) { + javaVersions.add(JavaHome.of(version, new File(findJavaHome(Integer.toString(version))))); + } + } + + GenerateGlobalBuildInfoTask generateTask = project.getTasks().create("generateGlobalBuildInfo", + GenerateGlobalBuildInfoTask.class, task -> { + task.setJavaVersions(javaVersions); + task.setMinimumCompilerVersion(minimumCompilerVersion); + task.setMinimumRuntimeVersion(minimumRuntimeVersion); + task.setCompilerJavaHome(compilerJavaHome); + task.setRuntimeJavaHome(runtimeJavaHome); + task.getOutputFile().set(new File(project.getBuildDir(), "global-build-info")); + task.getCompilerVersionFile().set(new File(project.getBuildDir(), "java-compiler-version")); + task.getRuntimeVersionFile().set(new File(project.getBuildDir(), "java-runtime-version")); + task.getFipsJvmFile().set(new 
File(project.getBuildDir(), "in-fips-jvm")); + }); + + PrintGlobalBuildInfoTask printTask = project.getTasks().create("printGlobalBuildInfo", PrintGlobalBuildInfoTask.class, task -> { + task.getBuildInfoFile().set(generateTask.getOutputFile()); + task.getCompilerVersionFile().set(generateTask.getCompilerVersionFile()); + task.getRuntimeVersionFile().set(generateTask.getRuntimeVersionFile()); + task.getFipsJvmFile().set(generateTask.getFipsJvmFile()); + task.setGlobalInfoListeners(extension.listeners); + }); + + project.getExtensions().getByType(ExtraPropertiesExtension.class).set("defaultParallel", findDefaultParallel(project)); + + project.allprojects(p -> { + // Make sure that any task execution generates and prints build info + p.getTasks().all(task -> { + if (task != generateTask && task != printTask) { + task.dependsOn(printTask); + } + }); + + ExtraPropertiesExtension ext = p.getExtensions().getByType(ExtraPropertiesExtension.class); + + ext.set("compilerJavaHome", compilerJavaHome); + ext.set("runtimeJavaHome", runtimeJavaHome); + ext.set("isRuntimeJavaHomeSet", compilerJavaHome.equals(runtimeJavaHome) == false); + ext.set("javaVersions", javaVersions); + ext.set("minimumCompilerVersion", minimumCompilerVersion); + ext.set("minimumRuntimeVersion", minimumRuntimeVersion); + ext.set("gradleJavaVersion", Jvm.current().getJavaVersion()); + }); + } + + private static File findCompilerJavaHome() { + String compilerJavaHome = System.getenv("JAVA_HOME"); + String compilerJavaProperty = System.getProperty("compiler.java"); + + if (compilerJavaProperty != null) { + compilerJavaHome = findJavaHome(compilerJavaProperty); + } + + // if JAVA_HOME is not set, we use the JDK that Gradle was run with. + return compilerJavaHome == null ? Jvm.current().getJavaHome() : new File(compilerJavaHome); + } + + private static File findRuntimeJavaHome(final File compilerJavaHome) { + String runtimeJavaProperty = System.getProperty("runtime.java"); + + if (runtimeJavaProperty != null) { + return new File(findJavaHome(runtimeJavaProperty)); + } + + return System.getenv("RUNTIME_JAVA_HOME") == null ? compilerJavaHome : new File(System.getenv("RUNTIME_JAVA_HOME")); + } + + private static String findJavaHome(String version) { + String versionedJavaHome = System.getenv(getJavaHomeEnvVarName(version)); + if (versionedJavaHome == null) { + throw new GradleException( + getJavaHomeEnvVarName(version) + " must be set to build Elasticsearch. " + + "Note that if the variable was just set you might have to run `./gradlew --stop` for " + + "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 for details."
+ ); + } + return versionedJavaHome; + } + + private static String getJavaHomeEnvVarName(String version) { + return "JAVA" + version + "_HOME"; + } + + private static String getResourceContents(String resourcePath) { + try (BufferedReader reader = new BufferedReader( + new InputStreamReader(GlobalBuildInfoPlugin.class.getResourceAsStream(resourcePath)) + )) { + StringBuilder b = new StringBuilder(); + for (String line = reader.readLine(); line != null; line = reader.readLine()) { + if (b.length() != 0) { + b.append('\n'); + } + b.append(line); + } + + return b.toString(); + } catch (IOException e) { + throw new UncheckedIOException("Error trying to read classpath resource: " + resourcePath, e); + } + } + + private static int findDefaultParallel(Project project) { + // Since it costs IO to compute this, and is done at configuration time we want to cache this if possible + // It's safe to store this in a static variable since it's just a primitive so leaking memory isn't an issue + if (_defaultParallel == null) { + File cpuInfoFile = new File("/proc/cpuinfo"); + if (cpuInfoFile.exists()) { + // Count physical cores on any Linux distro ( don't count hyper-threading ) + Map socketToCore = new HashMap<>(); + String currentID = ""; + + try (BufferedReader reader = new BufferedReader(new FileReader(cpuInfoFile))) { + for (String line = reader.readLine(); line != null; line = reader.readLine()) { + if (line.contains(":")) { + List parts = Arrays.stream(line.split(":", 2)).map(String::trim).collect(Collectors.toList()); + String name = parts.get(0); + String value = parts.get(1); + // the ID of the CPU socket + if (name.equals("physical id")) { + currentID = value; + } + // Number of cores not including hyper-threading + if (name.equals("cpu cores")) { + assert currentID.isEmpty() == false; + socketToCore.put(currentID, Integer.valueOf(value)); + currentID = ""; + } + } + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + _defaultParallel = socketToCore.values().stream().mapToInt(i -> i).sum(); + } else if (OS.current() == OS.MAC) { + // Ask macOS to count physical CPUs for us + ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + project.exec(spec -> { + spec.setExecutable("sysctl"); + spec.args("-n", "hw.physicalcpu"); + spec.setStandardOutput(stdout); + }); + + _defaultParallel = Integer.parseInt(stdout.toString().trim()); + } else { + // guess that it is half the number of processors (which is wrong on systems that do not have simultaneous multi-threading) + _defaultParallel = Runtime.getRuntime().availableProcessors() / 2; + } + } + + return _defaultParallel; + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalInfoExtension.java b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalInfoExtension.java new file mode 100644 index 00000000000..a2daa4a5767 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalInfoExtension.java @@ -0,0 +1,12 @@ +package org.elasticsearch.gradle.info; + +import java.util.ArrayList; +import java.util.List; + +public class GlobalInfoExtension { + final List listeners = new ArrayList<>(); + + public void ready(Runnable block) { + listeners.add(block); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/info/JavaHome.java b/buildSrc/src/main/java/org/elasticsearch/gradle/info/JavaHome.java new file mode 100644 index 00000000000..29ca2bafc79 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/info/JavaHome.java @@ -0,0 +1,35 @@ +package org.elasticsearch.gradle.info; + +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputDirectory; +import
org.gradle.api.tasks.Optional; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; + +import java.io.File; + +public class JavaHome { + private Integer version; + private File javaHome; + + private JavaHome(int version, File javaHome) { + this.version = version; + this.javaHome = javaHome; + } + + public static JavaHome of(int version, File javaHome) { + return new JavaHome(version, javaHome); + } + + @Input + public Integer getVersion() { + return version; + } + + @InputDirectory + @Optional + @PathSensitive(PathSensitivity.RELATIVE) + public File getJavaHome() { + return javaHome; + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/info/PrintGlobalBuildInfoTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/info/PrintGlobalBuildInfoTask.java new file mode 100644 index 00000000000..b83fe29b073 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/info/PrintGlobalBuildInfoTask.java @@ -0,0 +1,84 @@ +package org.elasticsearch.gradle.info; + +import org.gradle.api.DefaultTask; +import org.gradle.api.JavaVersion; +import org.gradle.api.file.RegularFileProperty; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.plugins.ExtraPropertiesExtension; +import org.gradle.api.resources.TextResource; +import org.gradle.api.tasks.InputFile; +import org.gradle.api.tasks.TaskAction; + +import javax.inject.Inject; +import java.util.ArrayList; +import java.util.List; + +public class PrintGlobalBuildInfoTask extends DefaultTask { + private final RegularFileProperty buildInfoFile; + private final RegularFileProperty compilerVersionFile; + private final RegularFileProperty runtimeVersionFile; + private final RegularFileProperty fipsJvmFile; + private List globalInfoListeners = new ArrayList<>(); + + @Inject + public PrintGlobalBuildInfoTask(ObjectFactory objectFactory) { + this.buildInfoFile = objectFactory.fileProperty(); + this.compilerVersionFile = objectFactory.fileProperty(); + this.runtimeVersionFile = objectFactory.fileProperty(); + this.fipsJvmFile = objectFactory.fileProperty(); + } + + @InputFile + public RegularFileProperty getBuildInfoFile() { + return buildInfoFile; + } + + @InputFile + public RegularFileProperty getCompilerVersionFile() { + return compilerVersionFile; + } + + @InputFile + public RegularFileProperty getRuntimeVersionFile() { + return runtimeVersionFile; + } + + @InputFile + public RegularFileProperty getFipsJvmFile() { + return fipsJvmFile; + } + + public void setGlobalInfoListeners(List globalInfoListeners) { + this.globalInfoListeners = globalInfoListeners; + } + + @TaskAction + public void print() { + getLogger().quiet("======================================="); + getLogger().quiet("Elasticsearch Build Hamster says Hello!"); + getLogger().quiet(getFileText(getBuildInfoFile()).asString()); + getLogger().quiet(" Random Testing Seed : " + getProject().property("testSeed")); + getLogger().quiet("======================================="); + + setGlobalProperties(); + globalInfoListeners.forEach(Runnable::run); + + // Since all tasks depend on this task, and it always runs for every build, this makes sure that lifecycle tasks will still + // correctly report as UP-TO-DATE, since the convention is a lifecycle task (i.e. assemble, build, etc) will only be marked as + // UP-TO-DATE if all upstream tasks were also UP-TO-DATE. 
+ setDidWork(false); + } + + private TextResource getFileText(RegularFileProperty regularFileProperty) { + return getProject().getResources().getText().fromFile(regularFileProperty.getAsFile().get()); + } + + private void setGlobalProperties() { + getProject().getRootProject().allprojects(p -> { + ExtraPropertiesExtension ext = p.getExtensions().getByType(ExtraPropertiesExtension.class); + ext.set("compilerJavaVersion", JavaVersion.valueOf(getFileText(getCompilerVersionFile()).asString())); + ext.set("runtimeJavaVersion", JavaVersion.valueOf(getFileText(getRuntimeVersionFile()).asString())); + ext.set("inFipsJvm", Boolean.valueOf(getFileText(getFipsJvmFile()).asString())); + }); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java index e73a9d1e585..7ddec2b887e 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java @@ -26,6 +26,7 @@ import org.gradle.api.JavaVersion; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.Dependency; import org.gradle.api.file.FileTree; +import org.gradle.api.provider.Property; import org.gradle.api.specs.Spec; import org.gradle.api.tasks.CacheableTask; import org.gradle.api.tasks.Classpath; @@ -79,17 +80,13 @@ public class ThirdPartyAuditTask extends DefaultTask { private String javaHome; - private JavaVersion targetCompatibility; + private final Property targetCompatibility = getProject().getObjects().property(JavaVersion.class); @Input - public JavaVersion getTargetCompatibility() { + public Property getTargetCompatibility() { return targetCompatibility; } - public void setTargetCompatibility(JavaVersion targetCompatibility) { - this.targetCompatibility = targetCompatibility; - } - @InputFiles @PathSensitive(PathSensitivity.NAME_ONLY) public Configuration getForbiddenAPIsConfiguration() { @@ -287,7 +284,7 @@ public class ThirdPartyAuditTask extends DefaultTask { // pther version specific implementation of said classes. 
IntStream.rangeClosed( Integer.parseInt(JavaVersion.VERSION_1_9.getMajorVersion()), - Integer.parseInt(targetCompatibility.getMajorVersion()) + Integer.parseInt(targetCompatibility.get().getMajorVersion()) ).forEach(majorVersion -> getProject().copy(spec -> { spec.from(getProject().zipTree(jar)); spec.into(jarExpandDir); diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.global-build-info.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.global-build-info.properties new file mode 100644 index 00000000000..74287078772 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.global-build-info.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.info.GlobalBuildInfoPlugin \ No newline at end of file diff --git a/buildSrc/src/testKit/elasticsearch.build/build.gradle b/buildSrc/src/testKit/elasticsearch.build/build.gradle index 7a68fe59baa..daac5bfe33e 100644 --- a/buildSrc/src/testKit/elasticsearch.build/build.gradle +++ b/buildSrc/src/testKit/elasticsearch.build/build.gradle @@ -29,6 +29,7 @@ forbiddenApisTest.enabled = false jarHell.enabled = false // we don't have tests for now test.enabled = false +thirdPartyAudit.enabled = false task hello { doFirst { diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index 61e3546ed89..48bc899cd29 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -40,8 +40,8 @@ test { systemProperty 'tests.security.manager', 'false' } -if (project.inFipsJvm) { +thirdPartyAudit.onlyIf { // FIPS JVM includes manny classes from bouncycastle which count as jar hell for the third party audit, // rather than provide a long list of exclusions, disable the check on FIPS. 
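The `thirdPartyAudit.onlyIf { ... }` change above follows the same idea as the `doFirst` block added to the forbidden-APIs configuration earlier in this patch: anything that depends on the runtime JVM (its version, or whether it is a FIPS JVM) is evaluated at execution time, after the global build info task has populated the project properties, rather than at configuration time. A rough, hedged sketch of that gating in a build script (the task name and property lookups are illustrative, not the real tasks) could be:

// Sketch only: 'exampleAudit' and the property names are stand-ins for illustration.
tasks.register('exampleAudit') {
    // onlyIf is evaluated just before the task runs, once 'inFipsJvm' has been set by the build-info task
    onlyIf { project.ext.has('inFipsJvm') && project.ext.get('inFipsJvm') == false }
    doFirst {
        // runtime-JVM-dependent values are safe to read here
        logger.lifecycle("auditing with runtime Java ${project.ext.get('runtimeJavaVersion')}")
    }
}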
- thirdPartyAudit.enabled = false -} + project.inFipsJvm == false +} \ No newline at end of file diff --git a/libs/core/build.gradle b/libs/core/build.gradle index 785e52db445..36c40f747d6 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -47,10 +47,12 @@ if (!isEclipse && !isIdea) { } forbiddenApisJava9 { - if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) { - targetCompatibility = JavaVersion.VERSION_1_9.getMajorVersion() + rootProject.globalInfo.ready { + if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) { + targetCompatibility = JavaVersion.VERSION_1_9.getMajorVersion() + } + replaceSignatureFiles 'jdk-signatures' } - replaceSignatureFiles 'jdk-signatures' } jar { diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index 23de6a7f93b..d64e0aff774 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -172,10 +172,12 @@ thirdPartyAudit { ) } -if (project.inFipsJvm == false) { - // BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in - // a FIPS JVM with BouncyCastleFIPS Provider - thirdPartyAudit.ignoreMissingClasses ( - 'org.bouncycastle.asn1.x500.X500Name' - ) +rootProject.globalInfo.ready { + if (project.inFipsJvm == false) { + // BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in + // a FIPS JVM with BouncyCastleFIPS Provider + thirdPartyAudit.ignoreMissingClasses( + 'org.bouncycastle.asn1.x500.X500Name' + ) + } } diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 9cc113d8dc9..d2949371a74 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -135,119 +135,121 @@ thirdPartyAudit.ignoreMissingClasses ( ) // jarhell with jdk (intentionally, because jaxb was removed from default modules in java 9) -if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { - thirdPartyAudit.ignoreJarHellWithJDK ( - 'javax.xml.bind.Binder', - 'javax.xml.bind.ContextFinder$1', - 'javax.xml.bind.ContextFinder', - 'javax.xml.bind.DataBindingException', - 'javax.xml.bind.DatatypeConverter', - 'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter', - 'javax.xml.bind.DatatypeConverterImpl', - 'javax.xml.bind.DatatypeConverterInterface', - 'javax.xml.bind.Element', - 'javax.xml.bind.GetPropertyAction', - 'javax.xml.bind.JAXB$Cache', - 'javax.xml.bind.JAXB', - 'javax.xml.bind.JAXBContext', - 'javax.xml.bind.JAXBElement$GlobalScope', - 'javax.xml.bind.JAXBElement', - 'javax.xml.bind.JAXBException', - 'javax.xml.bind.JAXBIntrospector', - 'javax.xml.bind.JAXBPermission', - 'javax.xml.bind.MarshalException', - 'javax.xml.bind.Marshaller$Listener', - 'javax.xml.bind.Marshaller', - 'javax.xml.bind.Messages', - 'javax.xml.bind.NotIdentifiableEvent', - 'javax.xml.bind.ParseConversionEvent', - 'javax.xml.bind.PrintConversionEvent', - 'javax.xml.bind.PropertyException', - 'javax.xml.bind.SchemaOutputResolver', - 'javax.xml.bind.TypeConstraintException', - 'javax.xml.bind.UnmarshalException', - 'javax.xml.bind.Unmarshaller$Listener', - 'javax.xml.bind.Unmarshaller', - 'javax.xml.bind.UnmarshallerHandler', - 'javax.xml.bind.ValidationEvent', - 'javax.xml.bind.ValidationEventHandler', - 'javax.xml.bind.ValidationEventLocator', - 'javax.xml.bind.ValidationException', - 'javax.xml.bind.Validator', - 'javax.xml.bind.WhiteSpaceProcessor', - 'javax.xml.bind.annotation.DomHandler', - 'javax.xml.bind.annotation.W3CDomHandler', - 
'javax.xml.bind.annotation.XmlAccessOrder', - 'javax.xml.bind.annotation.XmlAccessType', - 'javax.xml.bind.annotation.XmlAccessorOrder', - 'javax.xml.bind.annotation.XmlAccessorType', - 'javax.xml.bind.annotation.XmlAnyAttribute', - 'javax.xml.bind.annotation.XmlAnyElement', - 'javax.xml.bind.annotation.XmlAttachmentRef', - 'javax.xml.bind.annotation.XmlAttribute', - 'javax.xml.bind.annotation.XmlElement$DEFAULT', - 'javax.xml.bind.annotation.XmlElement', - 'javax.xml.bind.annotation.XmlElementDecl$GLOBAL', - 'javax.xml.bind.annotation.XmlElementDecl', - 'javax.xml.bind.annotation.XmlElementRef$DEFAULT', - 'javax.xml.bind.annotation.XmlElementRef', - 'javax.xml.bind.annotation.XmlElementRefs', - 'javax.xml.bind.annotation.XmlElementWrapper', - 'javax.xml.bind.annotation.XmlElements', - 'javax.xml.bind.annotation.XmlEnum', - 'javax.xml.bind.annotation.XmlEnumValue', - 'javax.xml.bind.annotation.XmlID', - 'javax.xml.bind.annotation.XmlIDREF', - 'javax.xml.bind.annotation.XmlInlineBinaryData', - 'javax.xml.bind.annotation.XmlList', - 'javax.xml.bind.annotation.XmlMimeType', - 'javax.xml.bind.annotation.XmlMixed', - 'javax.xml.bind.annotation.XmlNs', - 'javax.xml.bind.annotation.XmlNsForm', - 'javax.xml.bind.annotation.XmlRegistry', - 'javax.xml.bind.annotation.XmlRootElement', - 'javax.xml.bind.annotation.XmlSchema', - 'javax.xml.bind.annotation.XmlSchemaType$DEFAULT', - 'javax.xml.bind.annotation.XmlSchemaType', - 'javax.xml.bind.annotation.XmlSchemaTypes', - 'javax.xml.bind.annotation.XmlSeeAlso', - 'javax.xml.bind.annotation.XmlTransient', - 'javax.xml.bind.annotation.XmlType$DEFAULT', - 'javax.xml.bind.annotation.XmlType', - 'javax.xml.bind.annotation.XmlValue', - 'javax.xml.bind.annotation.adapters.CollapsedStringAdapter', - 'javax.xml.bind.annotation.adapters.HexBinaryAdapter', - 'javax.xml.bind.annotation.adapters.NormalizedStringAdapter', - 'javax.xml.bind.annotation.adapters.XmlAdapter', - 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT', - 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter', - 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters', - 'javax.xml.bind.attachment.AttachmentMarshaller', - 'javax.xml.bind.attachment.AttachmentUnmarshaller', - 'javax.xml.bind.helpers.AbstractMarshallerImpl', - 'javax.xml.bind.helpers.AbstractUnmarshallerImpl', - 'javax.xml.bind.helpers.DefaultValidationEventHandler', - 'javax.xml.bind.helpers.Messages', - 'javax.xml.bind.helpers.NotIdentifiableEventImpl', - 'javax.xml.bind.helpers.ParseConversionEventImpl', - 'javax.xml.bind.helpers.PrintConversionEventImpl', - 'javax.xml.bind.helpers.ValidationEventImpl', - 'javax.xml.bind.helpers.ValidationEventLocatorImpl', - 'javax.xml.bind.util.JAXBResult', - 'javax.xml.bind.util.JAXBSource$1', - 'javax.xml.bind.util.JAXBSource', - 'javax.xml.bind.util.Messages', - 'javax.xml.bind.util.ValidationEventCollector' - ) -} else { - thirdPartyAudit.ignoreMissingClasses ( - 'javax.activation.ActivationDataFlavor', - 'javax.activation.DataContentHandler', - 'javax.activation.DataHandler', - 'javax.activation.DataSource', - 'javax.activation.FileDataSource', - 'javax.activation.FileTypeMap', - 'javax.activation.MimeType', - 'javax.activation.MimeTypeParseException', - ) -} +rootProject.globalInfo.ready { + if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { + thirdPartyAudit.ignoreJarHellWithJDK( + 'javax.xml.bind.Binder', + 'javax.xml.bind.ContextFinder$1', + 'javax.xml.bind.ContextFinder', + 'javax.xml.bind.DataBindingException', + 'javax.xml.bind.DatatypeConverter', + 
'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter', + 'javax.xml.bind.DatatypeConverterImpl', + 'javax.xml.bind.DatatypeConverterInterface', + 'javax.xml.bind.Element', + 'javax.xml.bind.GetPropertyAction', + 'javax.xml.bind.JAXB$Cache', + 'javax.xml.bind.JAXB', + 'javax.xml.bind.JAXBContext', + 'javax.xml.bind.JAXBElement$GlobalScope', + 'javax.xml.bind.JAXBElement', + 'javax.xml.bind.JAXBException', + 'javax.xml.bind.JAXBIntrospector', + 'javax.xml.bind.JAXBPermission', + 'javax.xml.bind.MarshalException', + 'javax.xml.bind.Marshaller$Listener', + 'javax.xml.bind.Marshaller', + 'javax.xml.bind.Messages', + 'javax.xml.bind.NotIdentifiableEvent', + 'javax.xml.bind.ParseConversionEvent', + 'javax.xml.bind.PrintConversionEvent', + 'javax.xml.bind.PropertyException', + 'javax.xml.bind.SchemaOutputResolver', + 'javax.xml.bind.TypeConstraintException', + 'javax.xml.bind.UnmarshalException', + 'javax.xml.bind.Unmarshaller$Listener', + 'javax.xml.bind.Unmarshaller', + 'javax.xml.bind.UnmarshallerHandler', + 'javax.xml.bind.ValidationEvent', + 'javax.xml.bind.ValidationEventHandler', + 'javax.xml.bind.ValidationEventLocator', + 'javax.xml.bind.ValidationException', + 'javax.xml.bind.Validator', + 'javax.xml.bind.WhiteSpaceProcessor', + 'javax.xml.bind.annotation.DomHandler', + 'javax.xml.bind.annotation.W3CDomHandler', + 'javax.xml.bind.annotation.XmlAccessOrder', + 'javax.xml.bind.annotation.XmlAccessType', + 'javax.xml.bind.annotation.XmlAccessorOrder', + 'javax.xml.bind.annotation.XmlAccessorType', + 'javax.xml.bind.annotation.XmlAnyAttribute', + 'javax.xml.bind.annotation.XmlAnyElement', + 'javax.xml.bind.annotation.XmlAttachmentRef', + 'javax.xml.bind.annotation.XmlAttribute', + 'javax.xml.bind.annotation.XmlElement$DEFAULT', + 'javax.xml.bind.annotation.XmlElement', + 'javax.xml.bind.annotation.XmlElementDecl$GLOBAL', + 'javax.xml.bind.annotation.XmlElementDecl', + 'javax.xml.bind.annotation.XmlElementRef$DEFAULT', + 'javax.xml.bind.annotation.XmlElementRef', + 'javax.xml.bind.annotation.XmlElementRefs', + 'javax.xml.bind.annotation.XmlElementWrapper', + 'javax.xml.bind.annotation.XmlElements', + 'javax.xml.bind.annotation.XmlEnum', + 'javax.xml.bind.annotation.XmlEnumValue', + 'javax.xml.bind.annotation.XmlID', + 'javax.xml.bind.annotation.XmlIDREF', + 'javax.xml.bind.annotation.XmlInlineBinaryData', + 'javax.xml.bind.annotation.XmlList', + 'javax.xml.bind.annotation.XmlMimeType', + 'javax.xml.bind.annotation.XmlMixed', + 'javax.xml.bind.annotation.XmlNs', + 'javax.xml.bind.annotation.XmlNsForm', + 'javax.xml.bind.annotation.XmlRegistry', + 'javax.xml.bind.annotation.XmlRootElement', + 'javax.xml.bind.annotation.XmlSchema', + 'javax.xml.bind.annotation.XmlSchemaType$DEFAULT', + 'javax.xml.bind.annotation.XmlSchemaType', + 'javax.xml.bind.annotation.XmlSchemaTypes', + 'javax.xml.bind.annotation.XmlSeeAlso', + 'javax.xml.bind.annotation.XmlTransient', + 'javax.xml.bind.annotation.XmlType$DEFAULT', + 'javax.xml.bind.annotation.XmlType', + 'javax.xml.bind.annotation.XmlValue', + 'javax.xml.bind.annotation.adapters.CollapsedStringAdapter', + 'javax.xml.bind.annotation.adapters.HexBinaryAdapter', + 'javax.xml.bind.annotation.adapters.NormalizedStringAdapter', + 'javax.xml.bind.annotation.adapters.XmlAdapter', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters', + 'javax.xml.bind.attachment.AttachmentMarshaller', + 
'javax.xml.bind.attachment.AttachmentUnmarshaller', + 'javax.xml.bind.helpers.AbstractMarshallerImpl', + 'javax.xml.bind.helpers.AbstractUnmarshallerImpl', + 'javax.xml.bind.helpers.DefaultValidationEventHandler', + 'javax.xml.bind.helpers.Messages', + 'javax.xml.bind.helpers.NotIdentifiableEventImpl', + 'javax.xml.bind.helpers.ParseConversionEventImpl', + 'javax.xml.bind.helpers.PrintConversionEventImpl', + 'javax.xml.bind.helpers.ValidationEventImpl', + 'javax.xml.bind.helpers.ValidationEventLocatorImpl', + 'javax.xml.bind.util.JAXBResult', + 'javax.xml.bind.util.JAXBSource$1', + 'javax.xml.bind.util.JAXBSource', + 'javax.xml.bind.util.Messages', + 'javax.xml.bind.util.ValidationEventCollector' + ) + } else { + thirdPartyAudit.ignoreMissingClasses( + 'javax.activation.ActivationDataFlavor', + 'javax.activation.DataContentHandler', + 'javax.activation.DataHandler', + 'javax.activation.DataSource', + 'javax.activation.FileDataSource', + 'javax.activation.FileTypeMap', + 'javax.activation.MimeType', + 'javax.activation.MimeTypeParseException', + ) + } +} \ No newline at end of file diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index e2bfad0df93..ecc8150c552 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -108,9 +108,11 @@ thirdPartyAudit.ignoreMissingClasses ( 'org.apache.log.Logger' ) -if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { - thirdPartyAudit.ignoreMissingClasses ( - 'javax.xml.bind.DatatypeConverter', - 'javax.xml.bind.JAXBContext' - ) +rootProject.globalInfo.ready { + if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { + thirdPartyAudit.ignoreMissingClasses( + 'javax.xml.bind.DatatypeConverter', + 'javax.xml.bind.JAXBContext' + ) + } } diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index cbe417708d7..835147c255c 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -84,8 +84,8 @@ thirdPartyAudit{ ignoreMissingClasses() } -if (project.inFipsJvm) { - // FIPS JVM includes manny classes from bouncycastle which count as jar hell for the third party audit, - // rather than provide a long list of exclusions, disable the check on FIPS. - thirdPartyAudit.enabled = false -} +thirdPartyAudit.onlyIf { + // FIPS JVM includes manny classes from bouncycastle which count as jar hell for the third party audit, + // rather than provide a long list of exclusions, disable the check on FIPS. 
+ project.inFipsJvm == false +} \ No newline at end of file diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index ad24de0e093..ef72f66244b 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -93,8 +93,10 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) { miniHDFSArgs.add("-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")}"); - if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) { - miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED') + rootProject.globalInfo.ready { + if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) { + miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED') + } } } // If it's an HA fixture, set a nameservice to use in the JVM options diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 531215c1ace..55b31a73b74 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -336,110 +336,112 @@ thirdPartyAudit.ignoreMissingClasses ( ) // jarhell with jdk (intentionally, because jaxb was removed from default modules in java 9) -if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { - thirdPartyAudit.ignoreJarHellWithJDK ( - 'javax.xml.bind.Binder', - 'javax.xml.bind.ContextFinder$1', - 'javax.xml.bind.ContextFinder', - 'javax.xml.bind.DataBindingException', - 'javax.xml.bind.DatatypeConverter', - 'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter', - 'javax.xml.bind.DatatypeConverterImpl', - 'javax.xml.bind.DatatypeConverterInterface', - 'javax.xml.bind.Element', - 'javax.xml.bind.GetPropertyAction', - 'javax.xml.bind.JAXB$Cache', - 'javax.xml.bind.JAXB', - 'javax.xml.bind.JAXBContext', - 'javax.xml.bind.JAXBElement$GlobalScope', - 'javax.xml.bind.JAXBElement', - 'javax.xml.bind.JAXBException', - 'javax.xml.bind.JAXBIntrospector', - 'javax.xml.bind.JAXBPermission', - 'javax.xml.bind.MarshalException', - 'javax.xml.bind.Marshaller$Listener', - 'javax.xml.bind.Marshaller', - 'javax.xml.bind.Messages', - 'javax.xml.bind.NotIdentifiableEvent', - 'javax.xml.bind.ParseConversionEvent', - 'javax.xml.bind.PrintConversionEvent', - 'javax.xml.bind.PropertyException', - 'javax.xml.bind.SchemaOutputResolver', - 'javax.xml.bind.TypeConstraintException', - 'javax.xml.bind.UnmarshalException', - 'javax.xml.bind.Unmarshaller$Listener', - 'javax.xml.bind.Unmarshaller', - 'javax.xml.bind.UnmarshallerHandler', - 'javax.xml.bind.ValidationEvent', - 'javax.xml.bind.ValidationEventHandler', - 'javax.xml.bind.ValidationEventLocator', - 'javax.xml.bind.ValidationException', - 'javax.xml.bind.Validator', - 'javax.xml.bind.WhiteSpaceProcessor', - 'javax.xml.bind.annotation.DomHandler', - 'javax.xml.bind.annotation.W3CDomHandler', - 'javax.xml.bind.annotation.XmlAccessOrder', - 'javax.xml.bind.annotation.XmlAccessType', - 'javax.xml.bind.annotation.XmlAccessorOrder', - 'javax.xml.bind.annotation.XmlAccessorType', - 'javax.xml.bind.annotation.XmlAnyAttribute', - 'javax.xml.bind.annotation.XmlAnyElement', - 'javax.xml.bind.annotation.XmlAttachmentRef', - 'javax.xml.bind.annotation.XmlAttribute', - 'javax.xml.bind.annotation.XmlElement$DEFAULT', - 'javax.xml.bind.annotation.XmlElement', - 
'javax.xml.bind.annotation.XmlElementDecl$GLOBAL', - 'javax.xml.bind.annotation.XmlElementDecl', - 'javax.xml.bind.annotation.XmlElementRef$DEFAULT', - 'javax.xml.bind.annotation.XmlElementRef', - 'javax.xml.bind.annotation.XmlElementRefs', - 'javax.xml.bind.annotation.XmlElementWrapper', - 'javax.xml.bind.annotation.XmlElements', - 'javax.xml.bind.annotation.XmlEnum', - 'javax.xml.bind.annotation.XmlEnumValue', - 'javax.xml.bind.annotation.XmlID', - 'javax.xml.bind.annotation.XmlIDREF', - 'javax.xml.bind.annotation.XmlInlineBinaryData', - 'javax.xml.bind.annotation.XmlList', - 'javax.xml.bind.annotation.XmlMimeType', - 'javax.xml.bind.annotation.XmlMixed', - 'javax.xml.bind.annotation.XmlNs', - 'javax.xml.bind.annotation.XmlNsForm', - 'javax.xml.bind.annotation.XmlRegistry', - 'javax.xml.bind.annotation.XmlRootElement', - 'javax.xml.bind.annotation.XmlSchema', - 'javax.xml.bind.annotation.XmlSchemaType$DEFAULT', - 'javax.xml.bind.annotation.XmlSchemaType', - 'javax.xml.bind.annotation.XmlSchemaTypes', - 'javax.xml.bind.annotation.XmlSeeAlso', - 'javax.xml.bind.annotation.XmlTransient', - 'javax.xml.bind.annotation.XmlType$DEFAULT', - 'javax.xml.bind.annotation.XmlType', - 'javax.xml.bind.annotation.XmlValue', - 'javax.xml.bind.annotation.adapters.CollapsedStringAdapter', - 'javax.xml.bind.annotation.adapters.HexBinaryAdapter', - 'javax.xml.bind.annotation.adapters.NormalizedStringAdapter', - 'javax.xml.bind.annotation.adapters.XmlAdapter', - 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT', - 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter', - 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters', - 'javax.xml.bind.attachment.AttachmentMarshaller', - 'javax.xml.bind.attachment.AttachmentUnmarshaller', - 'javax.xml.bind.helpers.AbstractMarshallerImpl', - 'javax.xml.bind.helpers.AbstractUnmarshallerImpl', - 'javax.xml.bind.helpers.DefaultValidationEventHandler', - 'javax.xml.bind.helpers.Messages', - 'javax.xml.bind.helpers.NotIdentifiableEventImpl', - 'javax.xml.bind.helpers.ParseConversionEventImpl', - 'javax.xml.bind.helpers.PrintConversionEventImpl', - 'javax.xml.bind.helpers.ValidationEventImpl', - 'javax.xml.bind.helpers.ValidationEventLocatorImpl', - 'javax.xml.bind.util.JAXBResult', - 'javax.xml.bind.util.JAXBSource$1', - 'javax.xml.bind.util.JAXBSource', - 'javax.xml.bind.util.Messages', - 'javax.xml.bind.util.ValidationEventCollector' - ) -} else { - thirdPartyAudit.ignoreMissingClasses 'javax.activation.DataHandler' +rootProject.globalInfo.ready { + if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { + thirdPartyAudit.ignoreJarHellWithJDK( + 'javax.xml.bind.Binder', + 'javax.xml.bind.ContextFinder$1', + 'javax.xml.bind.ContextFinder', + 'javax.xml.bind.DataBindingException', + 'javax.xml.bind.DatatypeConverter', + 'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter', + 'javax.xml.bind.DatatypeConverterImpl', + 'javax.xml.bind.DatatypeConverterInterface', + 'javax.xml.bind.Element', + 'javax.xml.bind.GetPropertyAction', + 'javax.xml.bind.JAXB$Cache', + 'javax.xml.bind.JAXB', + 'javax.xml.bind.JAXBContext', + 'javax.xml.bind.JAXBElement$GlobalScope', + 'javax.xml.bind.JAXBElement', + 'javax.xml.bind.JAXBException', + 'javax.xml.bind.JAXBIntrospector', + 'javax.xml.bind.JAXBPermission', + 'javax.xml.bind.MarshalException', + 'javax.xml.bind.Marshaller$Listener', + 'javax.xml.bind.Marshaller', + 'javax.xml.bind.Messages', + 'javax.xml.bind.NotIdentifiableEvent', + 'javax.xml.bind.ParseConversionEvent', + 'javax.xml.bind.PrintConversionEvent', + 
'javax.xml.bind.PropertyException', + 'javax.xml.bind.SchemaOutputResolver', + 'javax.xml.bind.TypeConstraintException', + 'javax.xml.bind.UnmarshalException', + 'javax.xml.bind.Unmarshaller$Listener', + 'javax.xml.bind.Unmarshaller', + 'javax.xml.bind.UnmarshallerHandler', + 'javax.xml.bind.ValidationEvent', + 'javax.xml.bind.ValidationEventHandler', + 'javax.xml.bind.ValidationEventLocator', + 'javax.xml.bind.ValidationException', + 'javax.xml.bind.Validator', + 'javax.xml.bind.WhiteSpaceProcessor', + 'javax.xml.bind.annotation.DomHandler', + 'javax.xml.bind.annotation.W3CDomHandler', + 'javax.xml.bind.annotation.XmlAccessOrder', + 'javax.xml.bind.annotation.XmlAccessType', + 'javax.xml.bind.annotation.XmlAccessorOrder', + 'javax.xml.bind.annotation.XmlAccessorType', + 'javax.xml.bind.annotation.XmlAnyAttribute', + 'javax.xml.bind.annotation.XmlAnyElement', + 'javax.xml.bind.annotation.XmlAttachmentRef', + 'javax.xml.bind.annotation.XmlAttribute', + 'javax.xml.bind.annotation.XmlElement$DEFAULT', + 'javax.xml.bind.annotation.XmlElement', + 'javax.xml.bind.annotation.XmlElementDecl$GLOBAL', + 'javax.xml.bind.annotation.XmlElementDecl', + 'javax.xml.bind.annotation.XmlElementRef$DEFAULT', + 'javax.xml.bind.annotation.XmlElementRef', + 'javax.xml.bind.annotation.XmlElementRefs', + 'javax.xml.bind.annotation.XmlElementWrapper', + 'javax.xml.bind.annotation.XmlElements', + 'javax.xml.bind.annotation.XmlEnum', + 'javax.xml.bind.annotation.XmlEnumValue', + 'javax.xml.bind.annotation.XmlID', + 'javax.xml.bind.annotation.XmlIDREF', + 'javax.xml.bind.annotation.XmlInlineBinaryData', + 'javax.xml.bind.annotation.XmlList', + 'javax.xml.bind.annotation.XmlMimeType', + 'javax.xml.bind.annotation.XmlMixed', + 'javax.xml.bind.annotation.XmlNs', + 'javax.xml.bind.annotation.XmlNsForm', + 'javax.xml.bind.annotation.XmlRegistry', + 'javax.xml.bind.annotation.XmlRootElement', + 'javax.xml.bind.annotation.XmlSchema', + 'javax.xml.bind.annotation.XmlSchemaType$DEFAULT', + 'javax.xml.bind.annotation.XmlSchemaType', + 'javax.xml.bind.annotation.XmlSchemaTypes', + 'javax.xml.bind.annotation.XmlSeeAlso', + 'javax.xml.bind.annotation.XmlTransient', + 'javax.xml.bind.annotation.XmlType$DEFAULT', + 'javax.xml.bind.annotation.XmlType', + 'javax.xml.bind.annotation.XmlValue', + 'javax.xml.bind.annotation.adapters.CollapsedStringAdapter', + 'javax.xml.bind.annotation.adapters.HexBinaryAdapter', + 'javax.xml.bind.annotation.adapters.NormalizedStringAdapter', + 'javax.xml.bind.annotation.adapters.XmlAdapter', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters', + 'javax.xml.bind.attachment.AttachmentMarshaller', + 'javax.xml.bind.attachment.AttachmentUnmarshaller', + 'javax.xml.bind.helpers.AbstractMarshallerImpl', + 'javax.xml.bind.helpers.AbstractUnmarshallerImpl', + 'javax.xml.bind.helpers.DefaultValidationEventHandler', + 'javax.xml.bind.helpers.Messages', + 'javax.xml.bind.helpers.NotIdentifiableEventImpl', + 'javax.xml.bind.helpers.ParseConversionEventImpl', + 'javax.xml.bind.helpers.PrintConversionEventImpl', + 'javax.xml.bind.helpers.ValidationEventImpl', + 'javax.xml.bind.helpers.ValidationEventLocatorImpl', + 'javax.xml.bind.util.JAXBResult', + 'javax.xml.bind.util.JAXBSource$1', + 'javax.xml.bind.util.JAXBSource', + 'javax.xml.bind.util.Messages', + 'javax.xml.bind.util.ValidationEventCollector' + ) + } else { + thirdPartyAudit.ignoreMissingClasses 'javax.activation.DataHandler' 
+ } } diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index 9f93d18a0e1..7800ff6951a 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -149,10 +149,12 @@ thirdPartyAudit { 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator' ) } -if (project.inFipsJvm == false) { - // BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in - // a FIPS JVM with BouncyCastleFIPS Provider - thirdPartyAudit.ignoreMissingClasses ( - 'org.bouncycastle.asn1.x500.X500Name' - ) +rootProject.globalInfo.ready { + if (project.inFipsJvm == false) { + // BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in + // a FIPS JVM with BouncyCastleFIPS Provider + thirdPartyAudit.ignoreMissingClasses( + 'org.bouncycastle.asn1.x500.X500Name' + ) + } } diff --git a/server/build.gradle b/server/build.gradle index bff5353f37f..6e0353eae7f 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -56,8 +56,10 @@ if (!isEclipse && !isIdea) { } forbiddenApisJava9 { - if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) { - targetCompatibility = JavaVersion.VERSION_1_9.getMajorVersion() + doFirst { + if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) { + targetCompatibility = JavaVersion.VERSION_1_9.getMajorVersion() + } } } @@ -314,8 +316,10 @@ thirdPartyAudit.ignoreMissingClasses ( 'com.google.common.geometry.S2LatLng' ) -if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { - thirdPartyAudit.ignoreMissingClasses 'javax.xml.bind.DatatypeConverter' +rootProject.globalInfo.ready { + if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { + thirdPartyAudit.ignoreMissingClasses 'javax.xml.bind.DatatypeConverter' + } } dependencyLicenses { diff --git a/x-pack/plugin/ccr/qa/restart/build.gradle b/x-pack/plugin/ccr/qa/restart/build.gradle index 8501de714fa..cace98d97b0 100644 --- a/x-pack/plugin/ccr/qa/restart/build.gradle +++ b/x-pack/plugin/ccr/qa/restart/build.gradle @@ -41,7 +41,7 @@ followClusterTestRunner { task followClusterRestartTest(type: RestIntegTestTask) {} followClusterRestartTestCluster { - dependsOn followClusterTestRunner + dependsOn followClusterTestRunner, 'followClusterTestCluster#stop' numNodes = 1 clusterName = 'follow-cluster' dataDir = { nodeNumber -> followClusterTest.nodes[0].dataDir } diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index e343b5906e7..e7e9c0fa71f 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -286,20 +286,22 @@ thirdPartyAudit { ) } -if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { - thirdPartyAudit.ignoreMissingClasses( - 'javax.xml.bind.JAXBContext', - 'javax.xml.bind.JAXBElement', - 'javax.xml.bind.JAXBException', - 'javax.xml.bind.Unmarshaller', - 'javax.xml.bind.UnmarshallerHandler', - 'javax.activation.ActivationDataFlavor', - 'javax.activation.DataContentHandler', - 'javax.activation.DataHandler', - 'javax.activation.DataSource', - 'javax.activation.FileDataSource', - 'javax.activation.FileTypeMap' - ) +rootProject.globalInfo.ready { + if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { + thirdPartyAudit.ignoreMissingClasses( + 'javax.xml.bind.JAXBContext', + 'javax.xml.bind.JAXBElement', + 'javax.xml.bind.JAXBException', + 'javax.xml.bind.Unmarshaller', + 'javax.xml.bind.UnmarshallerHandler', + 'javax.activation.ActivationDataFlavor', + 'javax.activation.DataContentHandler', + 'javax.activation.DataHandler', + 
'javax.activation.DataSource', + 'javax.activation.FileDataSource', + 'javax.activation.FileTypeMap' + ) + } } test { diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 00321c77808..205815bda8c 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -19,16 +19,18 @@ dependencyLicenses { mapping from: /bc.*/, to: 'bouncycastle' } -if (project.inFipsJvm) { - test.enabled = false - testingConventions.enabled = false - // Forbiden APIs non-portable checks fail because bouncy castle classes being used from the FIPS JDK since those are - // not part of the Java specification - all of this is as designed, so we have to relax this check for FIPS. - tasks.withType(CheckForbiddenApis) { - bundledSignatures -= "jdk-non-portable" - } - // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit, - // rather than provide a long list of exclusions, disable the check on FIPS. - thirdPartyAudit.enabled = false +rootProject.globalInfo.ready { + if (project.inFipsJvm) { + test.enabled = false + testingConventions.enabled = false + // Forbiden APIs non-portable checks fail because bouncy castle classes being used from the FIPS JDK since those are + // not part of the Java specification - all of this is as designed, so we have to relax this check for FIPS. + tasks.withType(CheckForbiddenApis) { + bundledSignatures -= "jdk-non-portable" + } + // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit, + // rather than provide a long list of exclusions, disable the check on FIPS. + thirdPartyAudit.enabled = false -} + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/security/with-ssl/build.gradle b/x-pack/plugin/sql/qa/security/with-ssl/build.gradle index de4e1734636..19459bade97 100644 --- a/x-pack/plugin/sql/qa/security/with-ssl/build.gradle +++ b/x-pack/plugin/sql/qa/security/with-ssl/build.gradle @@ -207,18 +207,16 @@ integTestCluster { return tmpFile.exists() } } -Closure notRunningFips = { - Boolean.parseBoolean(BuildPlugin.runJavaAsScript(project, project.runtimeJavaHome, - 'print(java.security.Security.getProviders()[0].name.toLowerCase().contains("fips"));')) == false -} // Do not attempt to form a cluster in a FIPS JVM, as doing so with a JKS keystore will fail. // TODO Revisit this when SQL CLI client can handle key/certificate instead of only Keystores. // https://github.com/elastic/elasticsearch/issues/32306 -tasks.matching({ it.name == "integTestCluster#init" }).all { onlyIf notRunningFips } -tasks.matching({ it.name == "integTestCluster#start" }).all { onlyIf notRunningFips } -tasks.matching({ it.name == "integTestCluster#wait" }).all { onlyIf notRunningFips } -tasks.matching({ it.name == "integTestRunner" }).all { onlyIf notRunningFips } +tasks.matching { it.name in ["integTestCluster#init", "integTestCluster#start", "integTestCluster#wait", "integTestRunner"] }.all { + onlyIf { + project.inFipsJvm == false + } +} + /** A lazy evaluator to find the san to use for certificate generation. 
*/ class SanEvaluator { diff --git a/x-pack/plugin/watcher/build.gradle b/x-pack/plugin/watcher/build.gradle index 09660e336e8..e236b75ee2c 100644 --- a/x-pack/plugin/watcher/build.gradle +++ b/x-pack/plugin/watcher/build.gradle @@ -70,45 +70,47 @@ thirdPartyAudit { } // pulled in as external dependency to work on java 9 -if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { - thirdPartyAudit.ignoreJarHellWithJDK ( - // pulled in as external dependency to work on java 9 - 'com.sun.activation.registries.LineTokenizer', - 'com.sun.activation.registries.LogSupport', - 'com.sun.activation.registries.MailcapFile', - 'com.sun.activation.registries.MailcapTokenizer', - 'com.sun.activation.registries.MimeTypeEntry', - 'com.sun.activation.registries.MimeTypeFile', - 'javax.activation.MailcapCommandMap', - 'javax.activation.MimetypesFileTypeMap', +rootProject.globalInfo.ready { + if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { + thirdPartyAudit.ignoreJarHellWithJDK( + // pulled in as external dependency to work on java 9 + 'com.sun.activation.registries.LineTokenizer', + 'com.sun.activation.registries.LogSupport', + 'com.sun.activation.registries.MailcapFile', + 'com.sun.activation.registries.MailcapTokenizer', + 'com.sun.activation.registries.MimeTypeEntry', + 'com.sun.activation.registries.MimeTypeFile', + 'javax.activation.MailcapCommandMap', + 'javax.activation.MimetypesFileTypeMap', - 'com.sun.activation.registries.MailcapParseException', - 'javax.activation.ActivationDataFlavor', - 'javax.activation.CommandInfo', - 'javax.activation.CommandMap', - 'javax.activation.CommandObject', - 'javax.activation.DataContentHandler', - 'javax.activation.DataContentHandlerFactory', - 'javax.activation.DataHandler$1', - 'javax.activation.DataHandler', - 'javax.activation.DataHandlerDataSource', - 'javax.activation.DataSource', - 'javax.activation.DataSourceDataContentHandler', - 'javax.activation.FileDataSource', - 'javax.activation.FileTypeMap', - 'javax.activation.MimeType', - 'javax.activation.MimeTypeParameterList', - 'javax.activation.MimeTypeParseException', - 'javax.activation.ObjectDataContentHandler', - 'javax.activation.SecuritySupport$1', - 'javax.activation.SecuritySupport$2', - 'javax.activation.SecuritySupport$3', - 'javax.activation.SecuritySupport$4', - 'javax.activation.SecuritySupport$5', - 'javax.activation.SecuritySupport', - 'javax.activation.URLDataSource', - 'javax.activation.UnsupportedDataTypeException' - ) + 'com.sun.activation.registries.MailcapParseException', + 'javax.activation.ActivationDataFlavor', + 'javax.activation.CommandInfo', + 'javax.activation.CommandMap', + 'javax.activation.CommandObject', + 'javax.activation.DataContentHandler', + 'javax.activation.DataContentHandlerFactory', + 'javax.activation.DataHandler$1', + 'javax.activation.DataHandler', + 'javax.activation.DataHandlerDataSource', + 'javax.activation.DataSource', + 'javax.activation.DataSourceDataContentHandler', + 'javax.activation.FileDataSource', + 'javax.activation.FileTypeMap', + 'javax.activation.MimeType', + 'javax.activation.MimeTypeParameterList', + 'javax.activation.MimeTypeParseException', + 'javax.activation.ObjectDataContentHandler', + 'javax.activation.SecuritySupport$1', + 'javax.activation.SecuritySupport$2', + 'javax.activation.SecuritySupport$3', + 'javax.activation.SecuritySupport$4', + 'javax.activation.SecuritySupport$5', + 'javax.activation.SecuritySupport', + 'javax.activation.URLDataSource', + 'javax.activation.UnsupportedDataTypeException' + ) + } } test { diff 
--git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index 7f0e14d2a53..70767faf334 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -116,13 +116,15 @@ for (Version version : bwcVersions.indexCompatible) { setting 'xpack.security.enabled', 'true' setting 'xpack.security.transport.ssl.enabled', 'true' - if (project.inFipsJvm) { - setting 'xpack.security.transport.ssl.key', 'testnode.pem' - setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' - keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' - } else { - setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' - setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + rootProject.globalInfo.ready { + if (project.inFipsJvm) { + setting 'xpack.security.transport.ssl.key', 'testnode.pem' + setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' + keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' + } else { + setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' + setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + } } setting 'xpack.license.self_generated.type', 'trial' dependsOn copyTestNodeKeyMaterial @@ -160,13 +162,15 @@ for (Version version : bwcVersions.indexCompatible) { // some tests rely on the translog not being flushed setting 'indices.memory.shard_inactive_time', '20m' setting 'xpack.security.enabled', 'true' - if (project.inFipsJvm) { - setting 'xpack.security.transport.ssl.key', 'testnode.pem' - setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' - keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' - } else { - setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' - setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + rootProject.globalInfo.ready { + if (project.inFipsJvm) { + setting 'xpack.security.transport.ssl.key', 'testnode.pem' + setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' + keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' + } else { + setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' + setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + } } setting 'xpack.license.self_generated.type', 'trial' dependsOn copyTestNodeKeyMaterial diff --git a/x-pack/qa/reindex-tests-with-security/build.gradle b/x-pack/qa/reindex-tests-with-security/build.gradle index 7cbdfae5ed4..b0ae65b3448 100644 --- a/x-pack/qa/reindex-tests-with-security/build.gradle +++ b/x-pack/qa/reindex-tests-with-security/build.gradle @@ -38,8 +38,10 @@ integTestCluster { setting 'reindex.ssl.truststore.password', 'password' // Workaround for JDK-8212885 - if (project.ext.runtimeJavaVersion.isJava12Compatible() == false) { - setting 'reindex.ssl.supported_protocols', 'TLSv1.2' + rootProject.globalInfo.ready { + if (project.ext.runtimeJavaVersion.isJava12Compatible() == false) { + setting 'reindex.ssl.supported_protocols', 'TLSv1.2' + } } extraConfigFile 'roles.yml', 'roles.yml' diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 58dac6b8f25..471503e385d 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -123,13 +123,15 @@ for (Version version : bwcVersions.wireCompatible) { setting 'xpack.security.authc.token.timeout', '60m' setting 
'logger.org.elasticsearch.xpack.security.authc.TokenService', 'trace' setting 'xpack.security.audit.enabled', 'true' - if (project.inFipsJvm) { - setting 'xpack.security.transport.ssl.key', 'testnode.pem' - setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' - keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' - } else { - setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' - setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + rootProject.globalInfo.ready { + if (project.inFipsJvm) { + setting 'xpack.security.transport.ssl.key', 'testnode.pem' + setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' + keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' + } else { + setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' + setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + } } dependsOn copyTestNodeKeyMaterial extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') @@ -190,13 +192,15 @@ for (Version version : bwcVersions.wireCompatible) { setting 'xpack.security.transport.ssl.enabled', 'true' setting 'xpack.security.authc.token.timeout', '60m' setting 'logger.org.elasticsearch.xpack.security.authc.TokenService', 'trace' - if (project.inFipsJvm) { - setting 'xpack.security.transport.ssl.key', 'testnode.pem' - setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' - keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' - } else { - setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' - setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + rootProject.globalInfo.ready { + if (project.inFipsJvm) { + setting 'xpack.security.transport.ssl.key', 'testnode.pem' + setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' + keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' + } else { + setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' + setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + } } setting 'node.attr.upgraded', 'true' setting 'xpack.security.authc.token.enabled', 'true' From 1bb505c70dcd7a918bc4e96908a0946cc37310de Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 30 May 2019 09:23:38 -0700 Subject: [PATCH 012/210] Clarify the settings around limiting nested mappings. (#42686) * Previously, we mentioned multiple times that each nested object was indexed as its own document. This is repetitive, and is also a bit confusing in the context of `index.mapping.nested_fields.limit`, as that applies to the number of distinct `nested` types in the mappings, not the number of nested objects. We now just describe the issue once at the beginning of the section, to illustrate why `nested` types can be expensive. * Reference the ongoing example to clarify the meaning of the two settings. Addresses #28363. --- docs/reference/mapping.asciidoc | 10 ++---- docs/reference/mapping/types/nested.asciidoc | 38 +++++++++++++------- 2 files changed, 28 insertions(+), 20 deletions(-) diff --git a/docs/reference/mapping.asciidoc b/docs/reference/mapping.asciidoc index 2e09a0a8ca2..d0a3c6e06cd 100644 --- a/docs/reference/mapping.asciidoc +++ b/docs/reference/mapping.asciidoc @@ -87,15 +87,11 @@ causing a mapping explosion: `2`, etc. The default is `20`. `index.mapping.nested_fields.limit`:: - The maximum number of `nested` fields in an index, defaults to `50`. 
- Indexing 1 document with 100 nested fields actually indexes 101 documents - as each nested document is indexed as a separate hidden document. + The maximum number of distinct `nested` mappings in an index, defaults to `50`. `index.mapping.nested_objects.limit`:: - The maximum number of `nested` json objects within a single document across - all nested fields, defaults to 10000. Indexing one document with an array of - 100 objects within a nested field, will actually create 101 documents, as - each nested object will be indexed as a separate hidden document. + The maximum number of `nested` JSON objects within a single document across + all nested types, defaults to 10000. `index.mapping.field_name_length.limit`:: Setting for the maximum length of a field name. The default value is diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index fe150a69b49..de0f3f2a5f1 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -193,20 +193,32 @@ phase. Instead, highlighting needs to be performed via ============================================= -[[limit-number-nested-fields]] -==== Limiting the number of `nested` fields +[float] +=== Limits on `nested` mappings and objects -Indexing a document with 100 nested fields actually indexes 101 documents as each nested -document is indexed as a separate document. To safeguard against ill-defined mappings -the number of nested fields that can be defined per index has been limited to 50. See -<>. +As described earlier, each nested object is indexed as a separate document under the hood. +Continuing with the example above, if we indexed a single document containing 100 `user` objects, +then 101 Lucene documents would be created -- one for the parent document, and one for each +nested object. Because of the expense associated with `nested` mappings, Elasticsearch puts +settings in place to guard against performance problems: + +`index.mapping.nested_fields.limit`:: + + The `nested` type should only be used in special cases, when arrays of objects need to be + queried independently of each other. To safeguard against poorly designed mappings, this setting + limits the number of unique `nested` types per index. In our example, the `user` mapping would + count as only 1 towards this limit. Defaults to 50. + +`index.mapping.nested_objects.limit`:: + + This setting limits the number of nested objects that a single document may contain across all + `nested` types, in order to prevent out of memory errors when a document contains too many nested + objects. To illustrate how the setting works, say we added another `nested` type called `comments` + to our example mapping above. Then for each document, the combined number of `user` and `comment` + objects it contains must be below the limit. Defaults to 10000. + +Additional background on these settings, including information on their default values, can be found +in <>. -[[limit-nested-json-objects-number]] -==== Limiting the number of `nested` json objects -Indexing a document with an array of 100 objects within a nested field, will actually -create 101 documents, as each nested object will be indexed as a separate document. -To prevent out of memory errors when a single document contains too many nested json -objects, the number of nested json objects that a single document may contain across all fields -has been limited to 10000. See <>. 
From d83b91d56a82ad76bc446cf9c0f725718f6a29c1 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 30 May 2019 12:12:37 -0700 Subject: [PATCH 013/210] [DOCS] Disable Metricbeat system module (#42601) --- .../monitoring/configuring-metricbeat.asciidoc | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/reference/monitoring/configuring-metricbeat.asciidoc b/docs/reference/monitoring/configuring-metricbeat.asciidoc index 265eba5d480..0a3dad3f378 100644 --- a/docs/reference/monitoring/configuring-metricbeat.asciidoc +++ b/docs/reference/monitoring/configuring-metricbeat.asciidoc @@ -146,6 +146,23 @@ file. // end::remote-monitoring-user[] -- +. Optional: Disable the system module in {metricbeat}. ++ +-- +// tag::disable-system-module[] +By default, the {metricbeat-ref}/metricbeat-module-system.html[system module] is +enabled. The information it collects, however, is not shown on the *Monitoring* +page in {kib}. Unless you want to use that information for other purposes, run +the following command: + +["source","sh",subs="attributes,callouts"] +---------------------------------------------------------------------- +metricbeat modules disable system +---------------------------------------------------------------------- + +// end::disable-system-module[] +-- + . Identify where to send the monitoring data. + + -- From 371cb9a8ce97b09e36b7139a307e324caee176f1 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 30 May 2019 16:06:11 -0400 Subject: [PATCH 014/210] Remove Log4j 1.2 API as a dependency (#42702) We had this as a dependency for legacy dependencies that still needed the Log4j 1.2 API. This appears to no longer be necessary, so this commit removes this artifact as a dependency. To remove this dependency, we had to fix a few places where we were accidentally relying on Log4j 1.2 instead of Log4j 2 (easy to do, since both APIs were on the compile-time classpath). Finally, we can remove our custom Netty logger factory. This was needed when we were on Log4j 1.2 and handled logging in our own unique way. When we migrated to Log4j 2 we could have dropped this dependency. However, even then Netty would still pick up Log4j 1.2 since it was on the classpath, thus the advantage to removing this as a dependency now. 
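[Editorial aside, not part of this patch: a small hedged sketch of the behavior the commit message relies on. With log4j-core on the classpath and no Log4j 1.2 jar present, Netty's own logger-factory resolution is expected to select its Log4j 2 backend, so no custom InternalLoggerFactory needs to be installed. The class name below is illustrative.]

[source,java]
--------------------------------------------------
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;

public class NettyLoggingProbe {

    public static void main(String[] args) {
        // Ask Netty for a logger without installing a custom factory; with Log4j 2 (and
        // no Log4j 1.2 API) on the classpath, the default factory should resolve to
        // Netty's built-in Log4j 2 support rather than a Log4j 1.2 or JDK fallback.
        InternalLogger logger = InternalLoggerFactory.getInstance(NettyLoggingProbe.class);
        logger.info("default netty logger factory: {}",
            InternalLoggerFactory.getDefaultFactory().getClass().getName());
    }
}
--------------------------------------------------
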
--- modules/transport-netty4/build.gradle | 4 + .../netty4/Netty4HttpServerTransport.java | 4 - .../netty4/Netty4InternalESLogger.java | 187 ---------------- .../transport/netty4/Netty4Transport.java | 4 - .../transport/netty4/Netty4Utils.java | 17 -- plugins/discovery-azure-classic/build.gradle | 1 + .../licenses/log4j-1.2-api-2.11.1.jar.sha1 | 0 .../licenses/log4j-LICENSE.txt | 0 .../licenses/log4j-NOTICE.txt | 0 plugins/discovery-ec2/build.gradle | 1 + .../licenses/log4j-1.2-api-2.11.1.jar.sha1 | 1 + .../discovery-ec2/licenses/log4j-LICENSE.txt | 202 ++++++++++++++++++ .../discovery-ec2/licenses/log4j-NOTICE.txt | 5 + plugins/discovery-gce/build.gradle | 1 + .../licenses/log4j-1.2-api-2.11.1.jar.sha1 | 1 + .../discovery-gce/licenses/log4j-LICENSE.txt | 202 ++++++++++++++++++ .../discovery-gce/licenses/log4j-NOTICE.txt | 5 + plugins/repository-gcs/build.gradle | 1 + .../licenses/log4j-1.2-api-2.11.1.jar.sha1 | 1 + .../repository-gcs/licenses/log4j-LICENSE.txt | 202 ++++++++++++++++++ .../repository-gcs/licenses/log4j-NOTICE.txt | 5 + plugins/repository-hdfs/build.gradle | 1 + .../licenses/log4j-1.2-api-2.11.1.jar.sha1 | 1 + .../licenses/log4j-LICENSE.txt | 202 ++++++++++++++++++ .../repository-hdfs/licenses/log4j-NOTICE.txt | 5 + plugins/repository-s3/build.gradle | 1 + .../licenses/log4j-1.2-api-2.11.1.jar.sha1 | 1 + .../repository-s3/licenses/log4j-LICENSE.txt | 202 ++++++++++++++++++ .../repository-s3/licenses/log4j-NOTICE.txt | 5 + plugins/transport-nio/build.gradle | 4 + .../common/logging/JsonLoggerTests.java | 2 +- .../org/elasticsearch/wildfly/WildflyIT.java | 5 +- server/build.gradle | 2 - .../plugins/PluginsServiceTests.java | 2 +- test/framework/build.gradle | 6 +- x-pack/plugin/core/build.gradle | 1 + .../licenses/log4j-1.2-api-2.11.1.jar.sha1 | 1 + x-pack/plugin/core/licenses/log4j-LICENSE.txt | 202 ++++++++++++++++++ x-pack/plugin/core/licenses/log4j-NOTICE.txt | 5 + .../core/indexing/AsyncTwoPhaseIndexer.java | 5 +- ...ransportStartDataFrameTransformAction.java | 4 +- .../xpack/rollup/job/IndexerUtils.java | 5 +- .../xpack/rollup/job/RollupJobTask.java | 5 +- 43 files changed, 1284 insertions(+), 227 deletions(-) delete mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java rename {server => plugins/discovery-azure-classic}/licenses/log4j-1.2-api-2.11.1.jar.sha1 (100%) rename {server => plugins/discovery-azure-classic}/licenses/log4j-LICENSE.txt (100%) rename {server => plugins/discovery-azure-classic}/licenses/log4j-NOTICE.txt (100%) create mode 100644 plugins/discovery-ec2/licenses/log4j-1.2-api-2.11.1.jar.sha1 create mode 100644 plugins/discovery-ec2/licenses/log4j-LICENSE.txt create mode 100644 plugins/discovery-ec2/licenses/log4j-NOTICE.txt create mode 100644 plugins/discovery-gce/licenses/log4j-1.2-api-2.11.1.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/log4j-LICENSE.txt create mode 100644 plugins/discovery-gce/licenses/log4j-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/log4j-1.2-api-2.11.1.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/log4j-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/log4j-NOTICE.txt create mode 100644 plugins/repository-hdfs/licenses/log4j-1.2-api-2.11.1.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/log4j-LICENSE.txt create mode 100644 plugins/repository-hdfs/licenses/log4j-NOTICE.txt create mode 100644 plugins/repository-s3/licenses/log4j-1.2-api-2.11.1.jar.sha1 create mode 100644 
plugins/repository-s3/licenses/log4j-LICENSE.txt create mode 100644 plugins/repository-s3/licenses/log4j-NOTICE.txt create mode 100644 x-pack/plugin/core/licenses/log4j-1.2-api-2.11.1.jar.sha1 create mode 100644 x-pack/plugin/core/licenses/log4j-LICENSE.txt create mode 100644 x-pack/plugin/core/licenses/log4j-NOTICE.txt diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index d64e0aff774..b4ec74355d2 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -76,6 +76,10 @@ thirdPartyAudit { 'org.apache.commons.logging.Log', 'org.apache.commons.logging.LogFactory', + // from Log4j (deliberate, Netty will fallback to Log4j 2) + 'org.apache.log4j.Level', + 'org.apache.log4j.Logger', + // from io.netty.handler.ssl.OpenSslEngine (netty) 'io.netty.internal.tcnative.Buffer', 'io.netty.internal.tcnative.Library', diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 8b31e0bcb28..5602aaba912 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -96,10 +96,6 @@ import static org.elasticsearch.http.netty4.cors.Netty4CorsHandler.ANY_ORIGIN; public class Netty4HttpServerTransport extends AbstractHttpServerTransport { private static final Logger logger = LogManager.getLogger(Netty4HttpServerTransport.class); - static { - Netty4Utils.setup(); - } - /* * Size in bytes of an individual message received by io.netty.handler.codec.MessageAggregator which accumulates the content for an * HTTP request. This number is used for estimating the maximum number of allowed buffers before the MessageAggregator's internal diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java deleted file mode 100644 index 4eca1803b63..00000000000 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.transport.netty4; - -import io.netty.util.internal.logging.AbstractInternalLogger; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.SuppressLoggerChecks; - -@SuppressLoggerChecks(reason = "safely delegates to logger") -class Netty4InternalESLogger extends AbstractInternalLogger { - - private final Logger logger; - - Netty4InternalESLogger(final String name) { - super(name); - this.logger = LogManager.getLogger(name); - } - - @Override - public boolean isTraceEnabled() { - return logger.isTraceEnabled(); - } - - @Override - public void trace(String msg) { - logger.trace(msg); - } - - @Override - public void trace(String format, Object arg) { - logger.trace(format, arg); - } - - @Override - public void trace(String format, Object argA, Object argB) { - logger.trace(format, argA, argB); - } - - @Override - public void trace(String format, Object... arguments) { - logger.trace(format, arguments); - } - - @Override - public void trace(String msg, Throwable t) { - logger.trace(msg, t); - } - - @Override - public boolean isDebugEnabled() { - return logger.isDebugEnabled(); - } - - @Override - public void debug(String msg) { - logger.debug(msg); - } - - @Override - public void debug(String format, Object arg) { - logger.debug(format, arg); - } - - @Override - public void debug(String format, Object argA, Object argB) { - logger.debug(format, argA, argB); - } - - @Override - public void debug(String format, Object... arguments) { - logger.debug(format, arguments); - } - - @Override - public void debug(String msg, Throwable t) { - logger.debug(msg, t); - } - - @Override - public boolean isInfoEnabled() { - return logger.isInfoEnabled(); - } - - @Override - public void info(String msg) { - logger.info(msg); - } - - @Override - public void info(String format, Object arg) { - logger.info(format, arg); - } - - @Override - public void info(String format, Object argA, Object argB) { - logger.info(format, argA, argB); - } - - @Override - public void info(String format, Object... arguments) { - logger.info(format, arguments); - } - - @Override - public void info(String msg, Throwable t) { - logger.info(msg, t); - } - - @Override - public boolean isWarnEnabled() { - return logger.isWarnEnabled(); - } - - @Override - public void warn(String msg) { - logger.warn(msg); - } - - @Override - public void warn(String format, Object arg) { - logger.warn(format, arg); - } - - @Override - public void warn(String format, Object... arguments) { - logger.warn(format, arguments); - } - - @Override - public void warn(String format, Object argA, Object argB) { - logger.warn(format, argA, argB); - } - - @Override - public void warn(String msg, Throwable t) { - logger.warn(msg, t); - } - - @Override - public boolean isErrorEnabled() { - return logger.isErrorEnabled(); - } - - @Override - public void error(String msg) { - logger.error(msg); - } - - @Override - public void error(String format, Object arg) { - logger.error(format, arg); - } - - @Override - public void error(String format, Object argA, Object argB) { - logger.error(format, argA, argB); - } - - @Override - public void error(String format, Object... 
arguments) { - logger.error(format, arguments); - } - - @Override - public void error(String msg, Throwable t) { - logger.error(msg, t); - } - -} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 690e5bd3824..db6ebb28749 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -78,10 +78,6 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF public class Netty4Transport extends TcpTransport { private static final Logger logger = LogManager.getLogger(Netty4Transport.class); - static { - Netty4Utils.setup(); - } - public static final Setting WORKER_COUNT = new Setting<>("transport.netty.worker_count", (s) -> Integer.toString(EsExecutors.numberOfProcessors(s) * 2), diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 35928b6c3c8..211a574dc98 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -23,8 +23,6 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.CompositeByteBuf; import io.netty.buffer.Unpooled; import io.netty.util.NettyRuntime; -import io.netty.util.internal.logging.InternalLogger; -import io.netty.util.internal.logging.InternalLoggerFactory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.elasticsearch.common.Booleans; @@ -38,21 +36,6 @@ import java.util.concurrent.atomic.AtomicBoolean; public class Netty4Utils { - static { - InternalLoggerFactory.setDefaultFactory(new InternalLoggerFactory() { - - @Override - public InternalLogger newInstance(final String name) { - return new Netty4InternalESLogger(name); - } - - }); - } - - public static void setup() { - - } - private static AtomicBoolean isAvailableProcessorsSet = new AtomicBoolean(); /** diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index d2949371a74..8fd50e3c5cf 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -35,6 +35,7 @@ dependencies { compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "commons-logging:commons-logging:${versions.commonslogging}" + compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" compile "commons-codec:commons-codec:${versions.commonscodec}" compile "commons-lang:commons-lang:2.6" compile "commons-io:commons-io:2.4" diff --git a/server/licenses/log4j-1.2-api-2.11.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/log4j-1.2-api-2.11.1.jar.sha1 similarity index 100% rename from server/licenses/log4j-1.2-api-2.11.1.jar.sha1 rename to plugins/discovery-azure-classic/licenses/log4j-1.2-api-2.11.1.jar.sha1 diff --git a/server/licenses/log4j-LICENSE.txt b/plugins/discovery-azure-classic/licenses/log4j-LICENSE.txt similarity index 100% rename from server/licenses/log4j-LICENSE.txt rename to plugins/discovery-azure-classic/licenses/log4j-LICENSE.txt diff --git 
a/server/licenses/log4j-NOTICE.txt b/plugins/discovery-azure-classic/licenses/log4j-NOTICE.txt similarity index 100% rename from server/licenses/log4j-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/log4j-NOTICE.txt diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index ecc8150c552..193ebd0b077 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -32,6 +32,7 @@ dependencies { compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "commons-logging:commons-logging:${versions.commonslogging}" + compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" compile "commons-codec:commons-codec:${versions.commonscodec}" compile 'com.fasterxml.jackson.core:jackson-databind:2.8.11.3' compile "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" diff --git a/plugins/discovery-ec2/licenses/log4j-1.2-api-2.11.1.jar.sha1 b/plugins/discovery-ec2/licenses/log4j-1.2-api-2.11.1.jar.sha1 new file mode 100644 index 00000000000..575d75dbda8 --- /dev/null +++ b/plugins/discovery-ec2/licenses/log4j-1.2-api-2.11.1.jar.sha1 @@ -0,0 +1 @@ +3aba3398fe064a3eab4331f88161c7480e848418 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/log4j-LICENSE.txt b/plugins/discovery-ec2/licenses/log4j-LICENSE.txt new file mode 100644 index 00000000000..6279e5206de --- /dev/null +++ b/plugins/discovery-ec2/licenses/log4j-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 1999-2005 The Apache Software Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/discovery-ec2/licenses/log4j-NOTICE.txt b/plugins/discovery-ec2/licenses/log4j-NOTICE.txt new file mode 100644 index 00000000000..03757323600 --- /dev/null +++ b/plugins/discovery-ec2/licenses/log4j-NOTICE.txt @@ -0,0 +1,5 @@ +Apache log4j +Copyright 2007 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
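
Each of these plugin builds now declares the `org.apache.logging.log4j:log4j-1.2-api` bridge directly, because the SDKs the plugins bundle still log through the Log4j 1.x API while the server no longer supplies the bridge on their behalf. A minimal sketch of the kind of dependency code the bridge covers, assuming Log4j 2 is configured as usual on the node; the class and method names here are hypothetical and purely illustrative:

[source,java]
--------------------------------------------------
// Hypothetical SDK-style code still written against the Log4j 1.x API.
// With log4j-1.2-api on the plugin classpath these calls are routed to the
// Log4j 2 core, so the output follows the node's log4j2.properties settings.
import org.apache.log4j.Logger;

public class LegacySdkClient {

    private static final Logger LOGGER = Logger.getLogger(LegacySdkClient.class);

    public void connect(String endpoint) {
        // A Log4j 1.x style call; the bridge turns it into a Log4j 2 event.
        LOGGER.info("connecting to " + endpoint);
    }

    public static void main(String[] args) {
        new LegacySdkClient().connect("https://example.invalid");
    }
}
--------------------------------------------------

Each plugin that bundles the bridge jar also carries the matching log4j LICENSE and NOTICE files, which is what the file additions and renames in these hunks provide.
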
\ No newline at end of file diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 697cc3780a1..25baa4b17ce 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -17,6 +17,7 @@ dependencies { compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "commons-logging:commons-logging:${versions.commonslogging}" + compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" compile "commons-codec:commons-codec:${versions.commonscodec}" } diff --git a/plugins/discovery-gce/licenses/log4j-1.2-api-2.11.1.jar.sha1 b/plugins/discovery-gce/licenses/log4j-1.2-api-2.11.1.jar.sha1 new file mode 100644 index 00000000000..575d75dbda8 --- /dev/null +++ b/plugins/discovery-gce/licenses/log4j-1.2-api-2.11.1.jar.sha1 @@ -0,0 +1 @@ +3aba3398fe064a3eab4331f88161c7480e848418 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/log4j-LICENSE.txt b/plugins/discovery-gce/licenses/log4j-LICENSE.txt new file mode 100644 index 00000000000..6279e5206de --- /dev/null +++ b/plugins/discovery-gce/licenses/log4j-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 1999-2005 The Apache Software Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/discovery-gce/licenses/log4j-NOTICE.txt b/plugins/discovery-gce/licenses/log4j-NOTICE.txt new file mode 100644 index 00000000000..03757323600 --- /dev/null +++ b/plugins/discovery-gce/licenses/log4j-NOTICE.txt @@ -0,0 +1,5 @@ +Apache log4j +Copyright 2007 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
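
Separately from the per-plugin bridge, the Netty4Utils hunk near the top of this patch drops the static block that registered Netty4InternalESLogger as Netty's default InternalLoggerFactory, along with the empty setup() hook that Netty4Transport called to trigger that registration. Netty is now left to discover a logging backend on its own. A minimal sketch of what that looks like from calling code, assuming Netty 4.1 and Log4j 2 on the classpath; NettyLoggingProbe is an illustrative name, not a class in this patch:

[source,java]
--------------------------------------------------
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;

public class NettyLoggingProbe {

    public static void main(String[] args) {
        // Without an explicit setDefaultFactory(...) call, Netty 4.1 probes for
        // a backend in order: SLF4J, Log4j 2, Log4j 1.x, then java.util.logging.
        InternalLogger logger = InternalLoggerFactory.getInstance(NettyLoggingProbe.class);

        // On the Elasticsearch classpath this resolves to the Log4j 2 backed
        // implementation, which is the deliberate fallback noted in the
        // transport-nio third-party audit change later in this patch.
        logger.info("netty internal logging now goes straight to Log4j 2");
    }
}
--------------------------------------------------

The transport-nio and test framework thirdPartyAudit changes later in this patch list the org.apache.log4j classes as deliberately missing for the same reason: the fallback to Log4j 2 is intended, so the audit should not fail on those references.
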
\ No newline at end of file diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 288ab3c99f1..ff1f5bc61ed 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -33,6 +33,7 @@ dependencies { compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "commons-logging:commons-logging:${versions.commonslogging}" + compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" compile "commons-codec:commons-codec:${versions.commonscodec}" compile 'com.google.api:api-common:1.7.0' compile 'com.google.api:gax:1.30.0' diff --git a/plugins/repository-gcs/licenses/log4j-1.2-api-2.11.1.jar.sha1 b/plugins/repository-gcs/licenses/log4j-1.2-api-2.11.1.jar.sha1 new file mode 100644 index 00000000000..575d75dbda8 --- /dev/null +++ b/plugins/repository-gcs/licenses/log4j-1.2-api-2.11.1.jar.sha1 @@ -0,0 +1 @@ +3aba3398fe064a3eab4331f88161c7480e848418 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/log4j-LICENSE.txt b/plugins/repository-gcs/licenses/log4j-LICENSE.txt new file mode 100644 index 00000000000..6279e5206de --- /dev/null +++ b/plugins/repository-gcs/licenses/log4j-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 1999-2005 The Apache Software Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/log4j-NOTICE.txt b/plugins/repository-gcs/licenses/log4j-NOTICE.txt new file mode 100644 index 00000000000..03757323600 --- /dev/null +++ b/plugins/repository-gcs/licenses/log4j-NOTICE.txt @@ -0,0 +1,5 @@ +Apache log4j +Copyright 2007 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
\ No newline at end of file diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index ef72f66244b..1b9d61f5515 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -52,6 +52,7 @@ dependencies { compile 'com.google.guava:guava:11.0.2' compile 'com.google.protobuf:protobuf-java:2.5.0' compile 'commons-logging:commons-logging:1.1.3' + compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" compile 'commons-cli:commons-cli:1.2' compile "commons-codec:commons-codec:${versions.commonscodec}" compile 'commons-collections:commons-collections:3.2.2' diff --git a/plugins/repository-hdfs/licenses/log4j-1.2-api-2.11.1.jar.sha1 b/plugins/repository-hdfs/licenses/log4j-1.2-api-2.11.1.jar.sha1 new file mode 100644 index 00000000000..575d75dbda8 --- /dev/null +++ b/plugins/repository-hdfs/licenses/log4j-1.2-api-2.11.1.jar.sha1 @@ -0,0 +1 @@ +3aba3398fe064a3eab4331f88161c7480e848418 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/log4j-LICENSE.txt b/plugins/repository-hdfs/licenses/log4j-LICENSE.txt new file mode 100644 index 00000000000..6279e5206de --- /dev/null +++ b/plugins/repository-hdfs/licenses/log4j-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 1999-2005 The Apache Software Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-hdfs/licenses/log4j-NOTICE.txt b/plugins/repository-hdfs/licenses/log4j-NOTICE.txt new file mode 100644 index 00000000000..03757323600 --- /dev/null +++ b/plugins/repository-hdfs/licenses/log4j-NOTICE.txt @@ -0,0 +1,5 @@ +Apache log4j +Copyright 2007 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
\ No newline at end of file diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 55b31a73b74..e3ba48e9b84 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -39,6 +39,7 @@ dependencies { compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "commons-logging:commons-logging:${versions.commonslogging}" + compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" compile "commons-codec:commons-codec:${versions.commonscodec}" compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" compile 'com.fasterxml.jackson.core:jackson-databind:2.8.11.3' diff --git a/plugins/repository-s3/licenses/log4j-1.2-api-2.11.1.jar.sha1 b/plugins/repository-s3/licenses/log4j-1.2-api-2.11.1.jar.sha1 new file mode 100644 index 00000000000..575d75dbda8 --- /dev/null +++ b/plugins/repository-s3/licenses/log4j-1.2-api-2.11.1.jar.sha1 @@ -0,0 +1 @@ +3aba3398fe064a3eab4331f88161c7480e848418 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/log4j-LICENSE.txt b/plugins/repository-s3/licenses/log4j-LICENSE.txt new file mode 100644 index 00000000000..6279e5206de --- /dev/null +++ b/plugins/repository-s3/licenses/log4j-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 1999-2005 The Apache Software Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-s3/licenses/log4j-NOTICE.txt b/plugins/repository-s3/licenses/log4j-NOTICE.txt new file mode 100644 index 00000000000..03757323600 --- /dev/null +++ b/plugins/repository-s3/licenses/log4j-NOTICE.txt @@ -0,0 +1,5 @@ +Apache log4j +Copyright 2007 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
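
The hunks that follow move Elasticsearch's own test code off the Log4j 1.x API as well: JsonLoggerTests and PluginsServiceTests switch their Level import to org.apache.logging.log4j.Level, and WildflyIT obtains its logger from LogManager rather than the Log4j 1.x Logger.getLogger factory. A minimal sketch of the resulting Log4j 2 usage pattern, with an illustrative class name:

[source,java]
--------------------------------------------------
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class Log4j2UsageExample {

    // Log4j 2 loggers come from LogManager, not from org.apache.log4j.Logger.
    private static final Logger LOGGER = LogManager.getLogger(Log4j2UsageExample.class);

    public static void main(String[] args) {
        // Levels are the org.apache.logging.log4j.Level constants.
        LOGGER.log(Level.INFO, "running against the Log4j 2 API");
    }
}
--------------------------------------------------
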
\ No newline at end of file diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index 7800ff6951a..a982758482c 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -53,6 +53,10 @@ thirdPartyAudit { 'org.apache.commons.logging.Log', 'org.apache.commons.logging.LogFactory', + // from Log4j (deliberate, Netty will fallback to Log4j 2) + 'org.apache.log4j.Level', + 'org.apache.log4j.Logger', + // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) 'org.bouncycastle.cert.X509v3CertificateBuilder', 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', diff --git a/qa/logging-config/src/test/java/org/elasticsearch/common/logging/JsonLoggerTests.java b/qa/logging-config/src/test/java/org/elasticsearch/common/logging/JsonLoggerTests.java index bbb20737c47..5ba61bef6d1 100644 --- a/qa/logging-config/src/test/java/org/elasticsearch/common/logging/JsonLoggerTests.java +++ b/qa/logging-config/src/test/java/org/elasticsearch/common/logging/JsonLoggerTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.logging; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LoggerContext; diff --git a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java index 9aebffdc4ce..995501d78f2 100644 --- a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java +++ b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java @@ -27,7 +27,8 @@ import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.util.EntityUtils; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestRuleLimitSysouts; import org.elasticsearch.cluster.ClusterModule; @@ -53,7 +54,7 @@ import static org.hamcrest.Matchers.instanceOf; @TestRuleLimitSysouts.Limit(bytes = 14000) public class WildflyIT extends LuceneTestCase { - Logger logger = Logger.getLogger(WildflyIT.class); + Logger logger = LogManager.getLogger(WildflyIT.class); public void testTransportClient() throws URISyntaxException, IOException { try (CloseableHttpClient client = HttpClientBuilder.create().build()) { diff --git a/server/build.gradle b/server/build.gradle index 6e0353eae7f..bf9d85e8766 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -118,8 +118,6 @@ dependencies { // logging compile "org.apache.logging.log4j:log4j-api:${versions.log4j}" compile "org.apache.logging.log4j:log4j-core:${versions.log4j}", optional - // to bridge dependencies that are still on Log4j 1 to Log4j 2 - compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}", optional // repackaged jna with native bits linked against all elastic supported platforms compile "org.elasticsearch:jna:${versions.jna}" diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index e447107aac8..78194888ebc 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -19,7 +19,7 @@ package 
org.elasticsearch.plugins; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.apache.lucene.util.Constants; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 686756c6e53..18978bd2d75 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -52,7 +52,11 @@ thirdPartyAudit.ignoreMissingClasses ( 'javax.servlet.ServletContextListener', 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', - 'org.apache.log.Logger' + 'org.apache.log.Logger', + 'org.apache.log4j.Category', + 'org.apache.log4j.Level', + 'org.apache.log4j.Logger', + 'org.apache.log4j.Priority' ) test { diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index fc0dd90a730..832b0af18fb 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -30,6 +30,7 @@ dependencies { compile "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}" compile "commons-logging:commons-logging:${versions.commonslogging}" + compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" compile "commons-codec:commons-codec:${versions.commonscodec}" // security deps diff --git a/x-pack/plugin/core/licenses/log4j-1.2-api-2.11.1.jar.sha1 b/x-pack/plugin/core/licenses/log4j-1.2-api-2.11.1.jar.sha1 new file mode 100644 index 00000000000..575d75dbda8 --- /dev/null +++ b/x-pack/plugin/core/licenses/log4j-1.2-api-2.11.1.jar.sha1 @@ -0,0 +1 @@ +3aba3398fe064a3eab4331f88161c7480e848418 \ No newline at end of file diff --git a/x-pack/plugin/core/licenses/log4j-LICENSE.txt b/x-pack/plugin/core/licenses/log4j-LICENSE.txt new file mode 100644 index 00000000000..6279e5206de --- /dev/null +++ b/x-pack/plugin/core/licenses/log4j-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 1999-2005 The Apache Software Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/x-pack/plugin/core/licenses/log4j-NOTICE.txt b/x-pack/plugin/core/licenses/log4j-NOTICE.txt new file mode 100644 index 00000000000..03757323600 --- /dev/null +++ b/x-pack/plugin/core/licenses/log4j-NOTICE.txt @@ -0,0 +1,5 @@ +Apache log4j +Copyright 2007 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
\ No newline at end of file diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java index 80b0378ae35..0c4477b6b70 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java @@ -6,7 +6,8 @@ package org.elasticsearch.xpack.core.indexing; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; @@ -35,7 +36,7 @@ import java.util.concurrent.atomic.AtomicReference; * @param Type that defines a job position to be defined by the implementation. */ public abstract class AsyncTwoPhaseIndexer { - private static final Logger logger = Logger.getLogger(AsyncTwoPhaseIndexer.class.getName()); + private static final Logger logger = LogManager.getLogger(AsyncTwoPhaseIndexer.class.getName()); private final JobStats stats; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java index 39c0c74bbd5..8b7bcb8d764 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java @@ -6,8 +6,8 @@ package org.elasticsearch.xpack.dataframe.action; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java index 94d64b17de8..2ea2fc792d0 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.xpack.rollup.job; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.search.aggregations.Aggregation; @@ -32,7 +33,7 @@ import java.util.stream.Collectors; * They are extracted out as static classes mainly to make testing easier. */ class IndexerUtils { - private static final Logger logger = Logger.getLogger(IndexerUtils.class.getName()); + private static final Logger logger = LogManager.getLogger(IndexerUtils.class); /** * The only entry point in this class. 
You hand this method an aggregation and an index diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java index fecda3a2ce2..2cbcf2fe83d 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.xpack.rollup.job; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkAction; @@ -46,7 +47,7 @@ import java.util.concurrent.atomic.AtomicReference; * Each RollupJobTask also registers itself into the Scheduler so that it can be triggered on the cron's interval. */ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerEngine.Listener { - private static final Logger logger = Logger.getLogger(RollupJobTask.class.getName()); + private static final Logger logger = LogManager.getLogger(RollupJobTask.class.getName()); static final String SCHEDULE_NAME = RollupField.TASK_NAME + "/schedule"; From 5f9382acc24d93c2e46e48b26669db62431763ea Mon Sep 17 00:00:00 2001 From: Alex Pang Date: Thu, 30 May 2019 18:01:04 -0400 Subject: [PATCH 015/210] Fix docs typo in the certutil CSR mode (#42593) Changes the mention of `cert` to `csr`. Co-Authored-By: Alex Pang --- docs/reference/commands/certutil.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/commands/certutil.asciidoc b/docs/reference/commands/certutil.asciidoc index 6f4d3224d7a..07a3f96738d 100644 --- a/docs/reference/commands/certutil.asciidoc +++ b/docs/reference/commands/certutil.asciidoc @@ -103,7 +103,7 @@ which prompts you for details about each instance. Alternatively, you can use the `--in` parameter to specify a YAML file that contains details about the instances. -The `cert` mode produces a single zip file which contains the CSRs and the +The `csr` mode produces a single zip file which contains the CSRs and the private keys for each instance. Each CSR is provided as a standard PEM encoding of a PKCS#10 CSR. Each key is provided as a PEM encoding of an RSA private key. From f6779de2b7e66fffa2b12e4ddb0c2471ea783d2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Fri, 31 May 2019 06:19:47 +0200 Subject: [PATCH 016/210] Increase maximum forecast interval to 10 years. (#41082) (#42710) Increase the maximum duration to ~10 years (3650 days). 
--- .../xpack/core/ml/action/ForecastJobAction.java | 4 ++-- .../src/test/resources/rest-api-spec/test/ml/forecast.yml | 8 ++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java index 0ea66ad0937..fb107579c6e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java @@ -48,8 +48,8 @@ public class ForecastJobAction extends Action { public static final ParseField DURATION = new ParseField("duration"); public static final ParseField EXPIRES_IN = new ParseField("expires_in"); - // Max allowed duration: 8 weeks - private static final TimeValue MAX_DURATION = TimeValue.parseTimeValue("56d", ""); + // Max allowed duration: 10 years + private static final TimeValue MAX_DURATION = TimeValue.parseTimeValue("3650d", ""); private static final ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/forecast.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/forecast.yml index 998f70de9d3..a81b6dba08e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/forecast.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/forecast.yml @@ -47,6 +47,14 @@ setup: job_id: "forecast-job" duration: "-1s" +--- +"Test forecast given duration is too large": + - do: + catch: /\[duration\] must be 3650d or less[:] \[3651d\]/ + ml.forecast: + job_id: "forecast-job" + duration: "3651d" + --- "Test forecast given expires_in is negative": - do: From 0ce7e7a4d840f148913455d58b9ef7b9575f4035 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 31 May 2019 09:42:32 +0200 Subject: [PATCH 017/210] Enable tests failing due to java-joda warnings (#42693) Tests were failing in mixed cluster after more broad warnings were introduced in 6.x These tests were using `yyyy-MM-dd` pattern which is now warning about the change of `y` to `u`. However, using predefined pattern `strict_date` which uses the same format prevents the warning from being generate and allow smooth upgrade/work in mixed cluster. 
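For context (background only, not part of this patch): the deprecation warning exists because `java.time` patterns treat `y` as year-of-era and `u` as the proleptic year, and the two only diverge for dates before year 1, which is why strict parsing of `y` without an era field can be ambiguous. A minimal, self-contained Java sketch of that difference — the class name is illustrative:

```java
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

public class YearPatternDemo {
    public static void main(String[] args) {
        DateTimeFormatter yearOfEra = DateTimeFormatter.ofPattern("yyyy-MM-dd");      // 'y' = year-of-era
        DateTimeFormatter prolepticYear = DateTimeFormatter.ofPattern("uuuu-MM-dd");  // 'u' = year

        // For common-era dates both patterns render identically.
        LocalDate modern = LocalDate.of(2019, 5, 31);
        System.out.println(yearOfEra.format(modern));      // 2019-05-31
        System.out.println(prolepticYear.format(modern));  // 2019-05-31

        // They only disagree before year 1 (ISO year 0 is 1 BCE).
        LocalDate bce = LocalDate.of(0, 1, 1);
        System.out.println(yearOfEra.format(bce));          // 0001-01-01
        System.out.println(prolepticYear.format(bce));      // 0000-01-01
    }
}
```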
relates #42679 --- .../test/search.aggregation/230_composite.yml | 8 ++++---- .../test/search/140_pre_filter_search_shards.yml | 12 +++--------- 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index 88fb807ba2e..5d63edf1360 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -284,7 +284,7 @@ setup: "date_histogram": { "field": "date", "interval": "1d", - "format": "yyyy-MM-dd" + "format": "strict_date" } } } @@ -316,7 +316,7 @@ setup: "date_histogram": { "field": "date", "interval": "1d", - "format": "yyyy-MM-dd" + "format": "strict_date" } } } @@ -347,7 +347,7 @@ setup: "date_histogram": { "field": "date", "calendar_interval": "1d", - "format": "yyyy-MM-dd" + "format": "strict_date" } } } @@ -377,7 +377,7 @@ setup: "date_histogram": { "field": "date", "calendar_interval": "1d", - "format": "yyyy-MM-dd" + "format": "strict_date" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml index 030fb5631c5..7c960878d85 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml @@ -9,7 +9,7 @@ setup: properties: created_at: type: date - format: "yyyy-MM-dd" + format: "strict_date" - do: indices.create: index: index_2 @@ -20,7 +20,7 @@ setup: properties: created_at: type: date - format: "yyyy-MM-dd" + format: "strict_date" - do: indices.create: index: index_3 @@ -31,14 +31,11 @@ setup: properties: created_at: type: date - format: "yyyy-MM-dd" + format: "strict_date" --- "pre_filter_shard_size with invalid parameter": - - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42679" - do: catch: /preFilterShardSize must be >= 1/ search: @@ -48,9 +45,6 @@ setup: --- "pre_filter_shard_size with shards that have no hit": - - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42679" - do: index: index: index_1 From ab6b86bac920bf9f7a91903aefe7484bda3d7e07 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 31 May 2019 11:23:36 +0100 Subject: [PATCH 018/210] Add option to ObjectParser to consume unknown fields (#42491) ObjectParser has two ways of dealing with unknown fields: ignore them entirely, or throw an error. Sometimes it can be useful instead to gather up these unknown fields and record them separately, for example as arbitrary entries in a map. This commit adds the ability to specify an unknown field consumer on an ObjectParser, called with the field name and parsed value of each unknown field encountered during parsing. The public API of ObjectParser is largely unchanged, with a single new constructor method and interface definition. 
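The new test added below exercises this API directly; as a quick orientation, a minimal usage sketch of the added constructor follows. The `Doc` class, its field names, and the `fromXContent` helper are illustrative only, and the import paths are assumed from the 7.x package layout rather than taken from this change.

```java
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

// Illustrative target object: one declared field plus a catch-all map.
class Doc {
    String name;
    final Map<String, Object> extras = new HashMap<>();

    void setName(String name) { this.name = name; }
    void addExtra(String field, Object value) { extras.put(field, value); }
}

class DocParsing {
    // Unknown fields are routed to Doc::addExtra instead of raising an error
    // (or being silently skipped, as with ignoreUnknownFields).
    static final ObjectParser<Doc, Void> PARSER = new ObjectParser<>("doc", Doc::addExtra, Doc::new);
    static {
        PARSER.declareString(Doc::setName, new ParseField("name"));
    }

    // Given JSON {"name":"geoff","other":2}, the returned Doc has name = "geoff"
    // and extras = {"other"=2}; previously "other" would have triggered an
    // XContentParseException from a strict parser.
    static Doc fromXContent(XContentParser parser) {
        return PARSER.parse(parser, null);
    }
}
```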
--- .../common/xcontent/ObjectParser.java | 103 ++++++++++++++---- .../common/xcontent/ObjectParserTests.java | 40 +++++++ 2 files changed, 122 insertions(+), 21 deletions(-) diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java index ee5e3347f8d..c80c5bdb0d0 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java @@ -78,14 +78,63 @@ public final class ObjectParser extends AbstractObjectParser { + + void acceptUnknownField(String parserName, String field, XContentLocation location, XContentParser parser, + Value value, Context context) throws IOException; + } + + private static UnknownFieldParser ignoreUnknown() { + return (n, f, l, p, v, c) -> p.skipChildren(); + } + + private static UnknownFieldParser errorOnUnknown() { + return (n, f, l, p, v, c) -> { + throw new XContentParseException(l, "[" + n + "] unknown field [" + f + "], parser not found"); + }; + } + + /** + * Defines how to consume a parsed undefined field + */ + public interface UnknownFieldConsumer { + void accept(Value target, String field, Object value); + } + + private static UnknownFieldParser consumeUnknownField(UnknownFieldConsumer consumer) { + return (parserName, field, location, parser, value, context) -> { + XContentParser.Token t = parser.currentToken(); + switch (t) { + case VALUE_STRING: + consumer.accept(value, field, parser.text()); + break; + case VALUE_NUMBER: + consumer.accept(value, field, parser.numberValue()); + break; + case VALUE_BOOLEAN: + consumer.accept(value, field, parser.booleanValue()); + break; + case VALUE_NULL: + consumer.accept(value, field, null); + break; + case START_OBJECT: + consumer.accept(value, field, parser.map()); + break; + case START_ARRAY: + consumer.accept(value, field, parser.list()); + break; + default: + throw new XContentParseException(parser.getTokenLocation(), + "[" + parserName + "] cannot parse field [" + field + "] with value type [" + t + "]"); + } + }; + } + private final Map fieldParserMap = new HashMap<>(); private final String name; private final Supplier valueSupplier; - /** - * Should this parser ignore unknown fields? This should generally be set to true only when parsing responses from external systems, - * never when parsing requests from users. - */ - private final boolean ignoreUnknownFields; + + private final UnknownFieldParser unknownFieldParser; /** * Creates a new ObjectParser instance with a name. This name is used to reference the parser in exceptions and messages. @@ -95,25 +144,45 @@ public final class ObjectParser extends AbstractObjectParser valueSupplier) { - this(name, false, valueSupplier); + this(name, errorOnUnknown(), valueSupplier); } /** - * Creates a new ObjectParser instance which a name. + * Creates a new ObjectParser instance with a name. * @param name the parsers name, used to reference the parser in exceptions and messages. * @param ignoreUnknownFields Should this parser ignore unknown fields? This should generally be set to true only when parsing * responses from external systems, never when parsing requests from users. * @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser. */ public ObjectParser(String name, boolean ignoreUnknownFields, @Nullable Supplier valueSupplier) { + this(name, ignoreUnknownFields ? 
ignoreUnknown() : errorOnUnknown(), valueSupplier); + } + + /** + * Creates a new ObjectParser instance with a name. + * @param name the parsers name, used to reference the parser in exceptions and messages. + * @param unknownFieldConsumer how to consume parsed unknown fields + * @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser. + */ + public ObjectParser(String name, UnknownFieldConsumer unknownFieldConsumer, @Nullable Supplier valueSupplier) { + this(name, consumeUnknownField(unknownFieldConsumer), valueSupplier); + } + + /** + * Creates a new ObjectParser instance with a name. + * @param name the parsers name, used to reference the parser in exceptions and messages. + * @param unknownFieldParser how to parse unknown fields + * @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser. + */ + private ObjectParser(String name, UnknownFieldParser unknownFieldParser, @Nullable Supplier valueSupplier) { this.name = name; this.valueSupplier = valueSupplier; - this.ignoreUnknownFields = ignoreUnknownFields; + this.unknownFieldParser = unknownFieldParser; } /** @@ -152,17 +221,18 @@ public final class ObjectParser extends AbstractObjectParser extends AbstractObjectParser parser; private final EnumSet supportedTokens; diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index e089b8a956a..a303fb46ec7 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -33,11 +33,14 @@ import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; public class ObjectParserTests extends ESTestCase { @@ -733,4 +736,41 @@ public class ObjectParserTests extends ESTestCase { this.foo = foo; } } + + private static class ObjectWithArbitraryFields { + String name; + Map fields = new HashMap<>(); + void setField(String key, Object value) { + fields.put(key, value); + } + void setName(String name) { + this.name = name; + } + } + + public void testConsumeUnknownFields() throws IOException { + XContentParser parser = createParser(JsonXContent.jsonXContent, + "{\n" + + " \"test\" : \"foo\",\n" + + " \"test_number\" : 2,\n" + + " \"name\" : \"geoff\",\n" + + " \"test_boolean\" : true,\n" + + " \"test_null\" : null,\n" + + " \"test_array\": [1,2,3,4],\n" + + " \"test_nested\": { \"field\" : \"value\", \"field2\" : [ \"list1\", \"list2\" ] }\n" + + "}"); + ObjectParser op + = new ObjectParser<>("unknown", ObjectWithArbitraryFields::setField, ObjectWithArbitraryFields::new); + op.declareString(ObjectWithArbitraryFields::setName, new ParseField("name")); + + ObjectWithArbitraryFields o = op.parse(parser, null); + assertEquals("geoff", o.name); + assertEquals(6, o.fields.size()); + assertEquals("foo", o.fields.get("test")); + assertEquals(2, o.fields.get("test_number")); + assertEquals(true, o.fields.get("test_boolean")); + assertNull(o.fields.get("test_null")); + assertThat(o.fields.get("test_array"), 
instanceOf(List.class)); + assertThat(o.fields.get("test_nested"), instanceOf(Map.class)); + } } From 01446ff4bd7a389b694b7933cf3f2adf12a3260e Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Fri, 31 May 2019 12:56:07 +0200 Subject: [PATCH 019/210] [Docs] Mention search related deprecations (#42751) Add deprecation entries for 7.3 regarding `common` query and `cutoff_frequency` parameter. Follows: #42691 --- docs/reference/migration/migrate_7_3.asciidoc | 20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/docs/reference/migration/migrate_7_3.asciidoc b/docs/reference/migration/migrate_7_3.asciidoc index ef205b1c60f..ee23c1f772e 100644 --- a/docs/reference/migration/migrate_7_3.asciidoc +++ b/docs/reference/migration/migrate_7_3.asciidoc @@ -38,4 +38,22 @@ appropriate. ==== IndexStorePlugin changes IndexStore and DirectoryService have been replaced by a stateless and simple -DirectoryFactory interface to create custom Lucene directory instances per shard. \ No newline at end of file +DirectoryFactory interface to create custom Lucene directory instances per shard. + + +[float] +[[breaking_73_search_changes]] +=== Search Changes + +[float] +==== Deprecation of queries + +The `common` query has been deprecated. The same functionality can be achieved +by the `match` query if the total number of hits is not tracked. + +[float] +===== Deprecation of query parameters + +The `cutoff_frequency` parameter has been deprecated for `match` and `multi_match` +queries. The same functionality can be achieved without any configuration provided +that the total number of hits is not tracked. From d5061a151a3e303827226ab09f9b561643d44b69 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 31 May 2019 13:58:49 +0200 Subject: [PATCH 020/210] Remove suppresions for "unchecked" for hamcrest varargs methods Backport(41528) #42749 In hamcrest 2.1 warnings for unchecked varargs were fixed by hamcrest using @SafeVarargs for those matchers where this warning occurred. 
This PR is aimed to remove these annotations when Matchers.contains ,Matchers.containsInAnyOrder or Matchers.hasItems was used backport #41528 --- .../java/org/elasticsearch/client/BulkProcessorIT.java | 2 -- .../client/BulkRequestWithGlobalParametersIT.java | 5 ----- .../elasticsearch/common/logging/JsonLoggerTests.java | 2 -- .../xpack/restart/FullClusterRestartIT.java | 1 - .../security/authc/ldap/SearchGroupsResolverTests.java | 1 - .../authc/ldap/ActiveDirectoryGroupsResolverTests.java | 1 - .../authc/ldap/ActiveDirectorySessionFactoryTests.java | 9 --------- .../authc/ldap/UserAttributeGroupsResolverTests.java | 3 --- 8 files changed, 24 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java index 762e927551b..2aa9457bcd8 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java @@ -291,7 +291,6 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), testDocs); } - @SuppressWarnings("unchecked") public void testGlobalParametersAndSingleRequest() throws Exception { createIndexWithMultipleShards("test"); @@ -326,7 +325,6 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { assertThat(blogs, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); } - @SuppressWarnings("unchecked") public void testGlobalParametersAndBulkProcessor() throws Exception { createIndexWithMultipleShards("test"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java index 3020eb0329b..dc49e6f88a6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java @@ -44,7 +44,6 @@ import static org.hamcrest.Matchers.nullValue; public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTestCase { - @SuppressWarnings("unchecked") public void testGlobalPipelineOnBulkRequest() throws IOException { createFieldAddingPipleine("xyz", "fieldNameXYZ", "valueXYZ"); @@ -83,7 +82,6 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldXYZ"), nullValue()))); } - @SuppressWarnings("unchecked") public void testMixPipelineOnRequestAndGlobal() throws IOException { createFieldAddingPipleine("globalId", "fieldXYZ", "valueXYZ"); createFieldAddingPipleine("perIndexId", "someNewField", "someValue"); @@ -153,7 +151,6 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest assertThat(hits, everyItem(hasType("global_type"))); } - @SuppressWarnings("unchecked") public void testTypeGlobalAndPerRequest() throws IOException { BulkRequest request = new BulkRequest(null, "global_type"); request.add(new IndexRequest("index1", "local_type", "1") @@ -171,7 +168,6 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest .and(hasType("global_type")))); } - @SuppressWarnings("unchecked") public void testGlobalRouting() throws IOException { 
createIndexWithMultipleShards("index"); BulkRequest request = new BulkRequest(null); @@ -189,7 +185,6 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest assertThat(hits, containsInAnyOrder(hasId("1"), hasId("2"))); } - @SuppressWarnings("unchecked") public void testMixLocalAndGlobalRouting() throws IOException { BulkRequest request = new BulkRequest(null); request.routing("globalRouting"); diff --git a/qa/logging-config/src/test/java/org/elasticsearch/common/logging/JsonLoggerTests.java b/qa/logging-config/src/test/java/org/elasticsearch/common/logging/JsonLoggerTests.java index 5ba61bef6d1..2416eb02bfd 100644 --- a/qa/logging-config/src/test/java/org/elasticsearch/common/logging/JsonLoggerTests.java +++ b/qa/logging-config/src/test/java/org/elasticsearch/common/logging/JsonLoggerTests.java @@ -67,7 +67,6 @@ public class JsonLoggerTests extends ESTestCase { super.tearDown(); } - @SuppressWarnings("unchecked") public void testJsonLayout() throws IOException { final Logger testLogger = LogManager.getLogger("test"); @@ -90,7 +89,6 @@ public class JsonLoggerTests extends ESTestCase { } } - @SuppressWarnings("unchecked") public void testPrefixLoggerInJson() throws IOException { Logger shardIdLogger = Loggers.getLogger("shardIdLogger", ShardId.fromString("[indexName][123]")); shardIdLogger.info("This is an info message with a shardId"); diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index d7355269a11..ae7cc95c1ab 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -442,7 +442,6 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { return StreamsUtils.copyToStringFromClasspath("/org/elasticsearch/xpack/restart/" + watch); } - @SuppressWarnings("unchecked") private void assertOldTemplatesAreDeleted() throws IOException { Map templates = entityAsMap(client().performRequest(new Request("GET", "/_template"))); assertThat(templates.keySet(), not(hasItems(is("watches"), startsWith("watch-history"), is("triggered_watches")))); diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java index 036cf8ad0db..f24bcface06 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java @@ -23,7 +23,6 @@ import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -@SuppressWarnings("unchecked") public class SearchGroupsResolverTests extends GroupsResolverTestCase { private static final String BRUCE_BANNER_DN = "uid=hulk,ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com"; diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java index 
1a4fd0242db..7fbbd217ae9 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java @@ -35,7 +35,6 @@ public class ActiveDirectoryGroupsResolverTests extends GroupsResolverTestCase { } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/35738") - @SuppressWarnings("unchecked") public void testResolveSubTree() throws Exception { Settings settings = Settings.builder() .put("xpack.security.authc.realms.active_directory.ad.group_search.scope", LdapSearchScope.SUB_TREE) diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index 73e1df5dd08..3dc432b482b 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -66,7 +66,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT return false; } - @SuppressWarnings("unchecked") public void testAdAuth() throws Exception { RealmConfig config = configureRealm("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false)); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { @@ -101,7 +100,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT return new RealmConfig(identifier, mergedSettings, env, new ThreadContext(globalSettings)); } - @SuppressWarnings("unchecked") public void testNetbiosAuth() throws Exception { final String adUrl = randomFrom(AD_LDAP_URL, AD_LDAP_GC_URL); RealmConfig config = configureRealm("ad-test", buildAdSettings(adUrl, AD_DOMAIN, false)); @@ -139,7 +137,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } } - @SuppressWarnings("unchecked") public void testAuthenticate() throws Exception { Settings settings = buildAdSettings(REALM_ID, AD_LDAP_URL, AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, false); @@ -163,7 +160,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } } - @SuppressWarnings("unchecked") public void testAuthenticateBaseUserSearch() throws Exception { Settings settings = buildAdSettings(REALM_ID, AD_LDAP_URL, AD_DOMAIN, "CN=Bruce Banner, CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.BASE, false); @@ -208,7 +204,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } } - @SuppressWarnings("unchecked") public void testAuthenticateWithUserPrincipalName() throws Exception { Settings settings = buildAdSettings(REALM_ID, AD_LDAP_URL, AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, false); @@ -229,7 +224,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } } - @SuppressWarnings("unchecked") public void testAuthenticateWithSAMAccountName() throws Exception { Settings settings = buildAdSettings(REALM_ID, AD_LDAP_URL, AD_DOMAIN, 
"CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, false); @@ -251,7 +245,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } } - @SuppressWarnings("unchecked") public void testCustomUserFilter() throws Exception { Settings settings = Settings.builder() .put(buildAdSettings(REALM_ID, AD_LDAP_URL, AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", @@ -275,7 +268,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } - @SuppressWarnings("unchecked") public void testStandardLdapConnection() throws Exception { String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com"; String userTemplate = "CN={0},CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; @@ -341,7 +333,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } } - @SuppressWarnings("unchecked") public void testStandardLdapWithAttributeGroups() throws Exception { String userTemplate = "CN={0},CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; Settings settings = LdapTestCase.buildLdapSettings(new String[]{AD_LDAP_URL}, userTemplate, false); diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java index 24f0ecace67..38adbbe0190 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java @@ -29,7 +29,6 @@ public class UserAttributeGroupsResolverTests extends GroupsResolverTestCase { public static final String BRUCE_BANNER_DN = "cn=Bruce Banner,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; private static final RealmConfig.RealmIdentifier REALM_ID = new RealmConfig.RealmIdentifier("ldap", "realm1"); - @SuppressWarnings("unchecked") public void testResolve() throws Exception { //falling back on the 'memberOf' attribute UserAttributeGroupsResolver resolver = new UserAttributeGroupsResolver(config(REALM_ID, Settings.EMPTY)); @@ -42,7 +41,6 @@ public class UserAttributeGroupsResolverTests extends GroupsResolverTestCase { containsString("Philanthropists"))); } - @SuppressWarnings("unchecked") public void testResolveFromPreloadedAttributes() throws Exception { SearchRequest preSearch = new SearchRequest(BRUCE_BANNER_DN, SearchScope.BASE, LdapUtils.OBJECT_CLASS_PRESENCE_FILTER, "memberOf"); final Collection attributes = ldapConnection.searchForEntry(preSearch).getAttributes(); @@ -57,7 +55,6 @@ public class UserAttributeGroupsResolverTests extends GroupsResolverTestCase { containsString("Philanthropists"))); } - @SuppressWarnings("unchecked") public void testResolveCustomGroupAttribute() throws Exception { Settings settings = Settings.builder() .put("user_group_attribute", "seeAlso") From e687fd58fc547faac0725a04ee4d648d5d180d6b Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Fri, 31 May 2019 08:03:10 -0600 Subject: [PATCH 021/210] Re-enable token bwc tests (#42727) This commit re-enables token bwc tests that run as part of the rolling upgrade tests. These tests were muted while #42651 was being backported. 
--- .../upgrades/TokenBackwardsCompatibilityIT.java | 2 -- .../rest-api-spec/test/mixed_cluster/50_token_auth.yml | 6 ------ .../rest-api-spec/test/upgraded_cluster/50_token_auth.yml | 4 ---- 3 files changed, 12 deletions(-) diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java index 0eb0f696581..2245fa3ea1d 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.upgrades; import org.apache.http.HttpHeaders; import org.apache.http.HttpHost; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -31,7 +30,6 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; -@AwaitsFix(bugUrl = "need to backport #42651") public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase { private Collection twoClients = null; diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/50_token_auth.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/50_token_auth.yml index a34128579f3..f426d9b2525 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/50_token_auth.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/50_token_auth.yml @@ -2,8 +2,6 @@ "Get the indexed token and use if to authenticate": - skip: features: headers - version: " - 7.99.99" - reason: "Need to backport PR #42651" - do: cluster.health: @@ -61,8 +59,6 @@ "Get the indexed refreshed access token and use if to authenticate": - skip: features: headers - version: " - 7.99.99" - reason: "Need to backport PR #42651" - do: get: @@ -115,8 +111,6 @@ "Get the indexed refresh token and use it to get another access token and authenticate": - skip: features: headers - version: " - 7.99.99" - reason: "Need to backport PR #42651" - do: get: diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml index 64897707c15..430f94c1064 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml @@ -2,8 +2,6 @@ "Get the indexed token and use if to authenticate": - skip: features: headers - version: " - 8.0.0" - reason: "Need to backport PR #42651" - do: cluster.health: @@ -51,8 +49,6 @@ "Get the indexed refresh token and use if to get another access token and authenticate": - skip: features: headers - version: " - 8.0.0" - reason: "Need to backport PR #42651" - do: get: From 87ca762573cea2eab2ba4c7187630b46ac57cbde Mon Sep 17 00:00:00 2001 From: David Roberts Date: Fri, 31 May 2019 15:40:53 +0100 Subject: [PATCH 022/210] [ML] Add Kibana application privilege to data frame admin/user roles (#42757) Data frame transforms are restricted by different roles to ML, but share the ML UI. 
To prevent the ML UI being hidden for users who only have the data frame admin or user role, it is necessary to add the ML Kibana application privilege to the backend data frame roles. --- .../authz/store/ReservedRolesStore.java | 12 ++++++++-- .../authz/store/ReservedRolesStoreTests.java | 24 +++++++++++++++++++ 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 49d4159f139..ab06fc32e28 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -180,14 +180,22 @@ public class ReservedRolesStore implements BiConsumer, ActionListene RoleDescriptor.IndicesPrivileges.builder() .indices(".data-frame-notifications*") .privileges("view_index_metadata", "read").build() - }, null, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) + }, + new RoleDescriptor.ApplicationResourcePrivileges[] { + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("kibana-*").resources("*").privileges("reserved_ml").build() + }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) .put("data_frame_transforms_user", new RoleDescriptor("data_frame_transforms_user", new String[] { "monitor_data_frame_transforms" }, new RoleDescriptor.IndicesPrivileges[]{ RoleDescriptor.IndicesPrivileges.builder() .indices(".data-frame-notifications*") .privileges("view_index_metadata", "read").build() - }, null, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) + }, + new RoleDescriptor.ApplicationResourcePrivileges[] { + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("kibana-*").resources("*").privileges("reserved_ml").build() + }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) .put("watcher_admin", new RoleDescriptor("watcher_admin", new String[] { "manage_watcher" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(Watch.INDEX, TriggeredWatchStoreField.INDEX_NAME, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 78f9623f4fb..bf2c08a9138 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -1096,6 +1096,18 @@ public class ReservedRolesStoreTests extends ESTestCase { assertNoAccessAllowed(role, ".data-frame-internal-1"); // internal use only assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); + + final String kibanaApplicationWithRandomIndex = "kibana-" + randomFrom(randomAlphaOfLengthBetween(8, 24), ".kibana"); + assertThat(role.application().grants( + new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-foo", "foo"), "*"), is(false)); + assertThat(role.application().grants( + new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-reserved_ml", "reserved_ml"), "*"), is(true)); + + final String otherApplication = "logstash-" + randomAlphaOfLengthBetween(8, 24); + 
assertThat(role.application().grants( + new ApplicationPrivilege(otherApplication, "app-foo", "foo"), "*"), is(false)); + assertThat(role.application().grants( + new ApplicationPrivilege(otherApplication, "app-reserved_ml", "reserved_ml"), "*"), is(false)); } public void testDataFrameTransformsUserRole() { @@ -1120,6 +1132,18 @@ public class ReservedRolesStoreTests extends ESTestCase { assertNoAccessAllowed(role, ".data-frame-internal-1"); assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); + + final String kibanaApplicationWithRandomIndex = "kibana-" + randomFrom(randomAlphaOfLengthBetween(8, 24), ".kibana"); + assertThat(role.application().grants( + new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-foo", "foo"), "*"), is(false)); + assertThat(role.application().grants( + new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-reserved_ml", "reserved_ml"), "*"), is(true)); + + final String otherApplication = "logstash-" + randomAlphaOfLengthBetween(8, 24); + assertThat(role.application().grants( + new ApplicationPrivilege(otherApplication, "app-foo", "foo"), "*"), is(false)); + assertThat(role.application().grants( + new ApplicationPrivilege(otherApplication, "app-reserved_ml", "reserved_ml"), "*"), is(false)); } public void testWatcherAdminRole() { From 478919c0bb7589a2c2c3f65380ca704182717300 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Fri, 31 May 2019 11:04:30 -0400 Subject: [PATCH 023/210] [DOCS] Remove unneeded `ifdef::asciidoctor[]` conditionals (#42758) Several `ifdef::asciidoctor` conditionals were added so that AsciiDoc and Asciidoctor doc builds rendered consistently. With https://github.com/elastic/docs/pull/827, Elasticsearch Reference documentation migrated completely to Asciidoctor. We no longer need to support AsciiDoc so we can remove these conditionals. Resolves #41722 --- .../settings/notification-settings.asciidoc | 6 ------ docs/reference/settings/ssl-settings.asciidoc | 15 --------------- 2 files changed, 21 deletions(-) diff --git a/docs/reference/settings/notification-settings.asciidoc b/docs/reference/settings/notification-settings.asciidoc index ac7160bd20a..a2eb84bc211 100644 --- a/docs/reference/settings/notification-settings.asciidoc +++ b/docs/reference/settings/notification-settings.asciidoc @@ -37,14 +37,8 @@ required. For more information, see {xpack-ref}/encrypting-data.html[Encrypting sensitive data in {watcher}]. `xpack.watcher.history.cleaner_service.enabled`:: -ifdef::asciidoctor[] added:[6.3.0,Default changed to `true`.] deprecated:[7.0.0,Watcher history indices are now managed by the `watch-history-ilm-policy` ILM policy] -endif::[] -ifndef::asciidoctor[] -added[6.3.0,Default changed to `true`.] -deprecated[7.0.0,Watcher history indices are now managed by the `watch-history-ilm-policy` ILM policy] -endif::[] + Set to `true` (default) to enable the cleaner service. If this setting is `true`, the `xpack.monitoring.enabled` setting must also be set to `true` with diff --git a/docs/reference/settings/ssl-settings.asciidoc b/docs/reference/settings/ssl-settings.asciidoc index 6d8ffd90b6a..a9c8576a8c4 100644 --- a/docs/reference/settings/ssl-settings.asciidoc +++ b/docs/reference/settings/ssl-settings.asciidoc @@ -38,13 +38,8 @@ endif::verifies[] Supported cipher suites can be found in Oracle's http://docs.oracle.com/javase/8/docs/technotes/guides/security/SunProviders.html[ Java Cryptography Architecture documentation]. Defaults to ``. 
-ifdef::asciidoctor[] [#{ssl-context}-tls-ssl-key-trusted-certificate-settings] ===== {component} TLS/SSL Key and Trusted Certificate Settings -endif::[] -ifndef::asciidoctor[] -===== anchor:{ssl-context}-tls-ssl-key-trusted-certificate-settings[] {component} TLS/SSL Key and Trusted Certificate Settings -endif::[] The following settings are used to specify a private key, certificate, and the trusted certificates that should be used when communicating over an SSL/TLS connection. @@ -110,13 +105,8 @@ Password to the truststore. +{ssl-prefix}.ssl.truststore.secure_password+ (<>):: Password to the truststore. -ifdef::asciidoctor[] [#{ssl-context}-pkcs12-files] ===== PKCS#12 Files -endif::[] -ifndef::asciidoctor[] -===== anchor:{ssl-context}-pkcs12-files[] PKCS#12 Files -endif::[] {es} can be configured to use PKCS#12 container files (`.p12` or `.pfx` files) that contain the private key, certificate and certificates that should be trusted. @@ -154,13 +144,8 @@ Password to the PKCS#12 file. +{ssl-prefix}.ssl.truststore.secure_password+ (<>):: Password to the PKCS#12 file. -ifdef::asciidoctor[] [#{ssl-context}-pkcs11-tokens] ===== PKCS#11 Tokens -endif::[] -ifndef::asciidoctor[] -===== anchor:{ssl-context}-pkcs11-tokens[] PKCS#11 Tokens -endif::[] {es} can be configured to use a PKCS#11 token that contains the private key, certificate and certificates that should be trusted. From 0a37dd7a869433d29f45379f49cab6baa20422b9 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Fri, 31 May 2019 11:08:54 -0400 Subject: [PATCH 024/210] [DOCS] Remove unneeded `ifdef::asciidoctor[]` conditionals (#42758) Several `ifdef::asciidoctor` conditionals were added so that AsciiDoc and Asciidoctor doc builds rendered consistently. With https://github.com/elastic/docs/pull/827, Elasticsearch Reference documentation migrated completely to Asciidoctor. We no longer need to support AsciiDoc so we can remove these conditionals. Resolves #41722 --- .../aggregations/pipeline/movavg-aggregation.asciidoc | 7 ------- 1 file changed, 7 deletions(-) diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc index 5d0a4b1fb6b..3c5d5d0cff7 100644 --- a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc @@ -1,14 +1,7 @@ [[search-aggregations-pipeline-movavg-aggregation]] === Moving Average Aggregation -ifdef::asciidoctor[] deprecated:[6.4.0, "The Moving Average aggregation has been deprecated in favor of the more general <>. The new Moving Function aggregation provides all the same functionality as the Moving Average aggregation, but also provides more flexibility."] -endif::[] -ifndef::asciidoctor[] -deprecated[6.4.0, The Moving Average aggregation has been deprecated in favor of the more general -<>. The new Moving Function aggregation provides -all the same functionality as the Moving Average aggregation, but also provides more flexibility.] -endif::[] Given an ordered series of data, the Moving Average aggregation will slide a window across the data and emit the average value of that window. 
For example, given the data `[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]`, we can calculate a simple moving From f22dcfb9dae79bd0689ea07a41a9b0c2a2172e06 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 31 May 2019 10:55:35 -0500 Subject: [PATCH 025/210] [ML] [Data Frame] nesting group_by fields like other aggs (#42718) (#42760) --- .../dataframe/integration/DataFramePivotRestIT.java | 12 ++++++++---- .../transforms/pivot/AggregationResultUtils.java | 2 +- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index 3c661a0f4ac..36f95e599ff 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -251,10 +251,10 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase { config += " \"pivot\": {" + " \"group_by\": {" - + " \"reviewer\": {\"terms\": { \"field\": \"user_id\" }}," + + " \"user.id\": {\"terms\": { \"field\": \"user_id\" }}," + " \"by_day\": {\"date_histogram\": {\"fixed_interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-dd\"}}}," + " \"aggregations\": {" - + " \"avg_rating\": {" + + " \"user.avg_rating\": {" + " \"avg\": {" + " \"field\": \"stars\"" + " } } } }" @@ -265,10 +265,14 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase { List> preview = (List>)previewDataframeResponse.get("preview"); // preview is limited to 100 assertThat(preview.size(), equalTo(100)); - Set expectedFields = new HashSet<>(Arrays.asList("reviewer", "by_day", "avg_rating")); + Set expectedTopLevelFields = new HashSet<>(Arrays.asList("user", "by_day")); + Set expectedNestedFields = new HashSet<>(Arrays.asList("id", "avg_rating")); preview.forEach(p -> { Set keys = p.keySet(); - assertThat(keys, equalTo(expectedFields)); + assertThat(keys, equalTo(expectedTopLevelFields)); + Map nestedObj = (Map)p.get("user"); + keys = nestedObj.keySet(); + assertThat(keys, equalTo(expectedNestedFields)); }); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java index f8857591b23..6201dd936ba 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java @@ -61,7 +61,7 @@ public final class AggregationResultUtils { groups.getGroups().keySet().forEach(destinationFieldName -> { Object value = bucket.getKey().get(destinationFieldName); idGen.add(destinationFieldName, value); - document.put(destinationFieldName, value); + updateDocument(document, destinationFieldName, value); }); List aggNames = aggregationBuilders.stream().map(AggregationBuilder::getName).collect(Collectors.toList()); From 61c6a26b3146916a49a24324a5efa06447a5b13a Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 31 May 2019 12:06:52 -0400 Subject: [PATCH 026/210] Remove locale-dependent string checking We were checking if an exception was 
caused by a specific reason "Not a directory". Alas, this reason is locale-dependent and can fail on systems that are not set to en_US.UTF-8. This commit addresses this by deriving what the locale-dependent error message would be and using that for comparison with the actual exception thrown. Relates #41689 --- .../org/elasticsearch/plugins/PluginsServiceTests.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 78194888ebc..9a9775dc700 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -171,7 +171,15 @@ public class PluginsServiceTests extends ESTestCase { if (Constants.WINDOWS) { assertThat(e.getCause(), instanceOf(NoSuchFileException.class)); } else { - assertThat(e.getCause(), hasToString(containsString("Not a directory"))); + // force a "Not a directory" exception to be thrown so that we can extract the locale-dependent message + final String expected; + try (InputStream ignored = Files.newInputStream(desktopServicesStore.resolve("not-a-directory"))) { + throw new AssertionError(); + } catch (final FileSystemException inner) { + // locale-dependent translation of "Not a directory" + expected = inner.getReason(); + } + assertThat(e.getCause(), hasToString(containsString(expected))); } } } From d0da30e5f44d77bd1a9923f2d7b8a04870b10750 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 31 May 2019 13:14:18 +0100 Subject: [PATCH 027/210] Return NO_INTERVALS rather than null from empty TokenStream (#42750) IntervalBuilder#analyzeText will currently return null if it is passed an empty TokenStream, which can lead to a confusing NullPointerException later on during querying. This commit changes the code to return NO_INTERVALS instead. Fixes #42587 --- .../java/org/elasticsearch/index/query/IntervalBuilder.java | 4 ++-- .../org/elasticsearch/index/query/IntervalBuilderTests.java | 6 ++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java index 92b4fa66419..5e104768484 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java @@ -96,7 +96,7 @@ public class IntervalBuilder { // formulate a single term, boolean, or phrase. 
if (numTokens == 0) { - return null; + return NO_INTERVALS; } else if (numTokens == 1) { // single term return analyzeTerm(stream); @@ -231,7 +231,7 @@ public class IntervalBuilder { return clauses; } - private static final IntervalsSource NO_INTERVALS = new IntervalsSource() { + static final IntervalsSource NO_INTERVALS = new IntervalsSource() { @Override public IntervalIterator intervals(String field, LeafReaderContext ctx) { diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java index 15ec8af0af2..69464edb513 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java @@ -110,6 +110,12 @@ public class IntervalBuilderTests extends ESTestCase { } + public void testEmptyTokenStream() throws IOException { + CannedTokenStream ts = new CannedTokenStream(); + IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), 0, true); + assertSame(IntervalBuilder.NO_INTERVALS, source); + } + public void testSimpleSynonyms() throws IOException { CannedTokenStream ts = new CannedTokenStream( From f51f8ed04ccf0e74d4273c36cb00e5f71cb4794b Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Fri, 31 May 2019 13:03:41 -0400 Subject: [PATCH 028/210] [DOCS] Remove unneeded options from `[source,sql]` code blocks (#42759) In AsciiDoc, `subs="attributes,callouts,macros"` options were required to render `include-tagged::` in a code block. With elastic/docs#827, Elasticsearch Reference documentation migrated from AsciiDoc to Asciidoctor. In Asciidoctor, the `subs="attributes,callouts,macros"` options are no longer needed to render `include-tagged::` in a code block. This commit removes those unneeded options. Resolves #41589 --- .../sql/functions/conditional.asciidoc | 42 +++++----- .../sql/functions/date-time.asciidoc | 80 +++++++++---------- .../reference/sql/functions/grouping.asciidoc | 10 +-- .../sql/functions/like-rlike.asciidoc | 4 +- .../sql/functions/operators.asciidoc | 36 ++++----- docs/reference/sql/functions/search.asciidoc | 16 ++-- docs/reference/sql/functions/string.asciidoc | 42 +++++----- docs/reference/sql/functions/system.asciidoc | 4 +- .../sql/functions/type-conversion.asciidoc | 10 +-- docs/reference/sql/language/indices.asciidoc | 12 +-- .../syntax/commands/describe-table.asciidoc | 2 +- .../language/syntax/commands/select.asciidoc | 58 +++++++------- .../syntax/commands/show-columns.asciidoc | 2 +- .../syntax/commands/show-functions.asciidoc | 10 +-- .../syntax/commands/show-tables.asciidoc | 12 +-- docs/reference/sql/security.asciidoc | 2 +- .../qa/src/main/resources/docs/docs.csv-spec | 4 +- 17 files changed, 173 insertions(+), 173 deletions(-) diff --git a/docs/reference/sql/functions/conditional.asciidoc b/docs/reference/sql/functions/conditional.asciidoc index f57878107b6..d25d50d0674 100644 --- a/docs/reference/sql/functions/conditional.asciidoc +++ b/docs/reference/sql/functions/conditional.asciidoc @@ -33,17 +33,17 @@ If the condition’s result is true, the value of the result expression that fol the subsequent when clauses will be skipped and not processed. 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[case] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[caseReturnNull] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[caseWithElse] ---- @@ -70,12 +70,12 @@ CASE WHEN expression = value1 THEN result1 END ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[caseWithOperand] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[caseWithOperandAndElse] ---- @@ -155,12 +155,12 @@ If all arguments are null, then it returns `null`. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[coalesceReturnNonNull] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[coalesceReturnNull] ---- @@ -199,12 +199,12 @@ If all arguments are null, then it returns `null`. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[greatestReturnNonNull] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[greatestReturnNull] ---- @@ -237,12 +237,12 @@ If all arguments are null, then it returns `null`. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[ifNullReturnFirst] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[ifNullReturnSecond] ---- @@ -276,12 +276,12 @@ logic of programming languages. If the 3rd expression is not provided and the co `null` is returned. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[iifWithDefaultValue] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[iifWithoutDefaultValue] ---- @@ -324,12 +324,12 @@ If all arguments are null, then it returns `null`. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[isNullReturnFirst] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[isNullReturnSecond] ---- @@ -369,12 +369,12 @@ If all arguments are null, then it returns `null`. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[leastReturnNonNull] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[leastReturnNull] ---- @@ -406,12 +406,12 @@ Returns `null` when the two input expressions are equal and if not, it returns the 1st expression. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[nullIfReturnFirst] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[nullIfReturnNull] ---- @@ -445,12 +445,12 @@ If all arguments are null, then it returns `null`. 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[nvlReturnFirst] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[nvlReturnSecond] ---- diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc index d9d5e7bcf14..45231393521 100644 --- a/docs/reference/sql/functions/date-time.asciidoc +++ b/docs/reference/sql/functions/date-time.asciidoc @@ -57,32 +57,32 @@ s|Description Basic arithmetic operators (`+`, `-`, etc) support date/time parameters as indicated below: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dtIntervalPlusInterval] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dtDateTimePlusInterval] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dtMinusInterval] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dtIntervalMinusInterval] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dtDateTimeMinusInterval] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dtIntervalMul] -------------------------------------------------- @@ -116,17 +116,17 @@ Unlike CURRENT_DATE, `CURDATE()` can only be used as a function with no argument This method always returns the same value for its every occurrence within the same query. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[currentDate] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[currentDateFunction] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[curDateFunction] -------------------------------------------------- @@ -134,7 +134,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[curDateFunction] Typically, this function (as well as its twin <> function is used for relative date filtering: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[filterToday] -------------------------------------------------- @@ -165,29 +165,29 @@ meaning a milliseconds precision current time will be returned. 
This method always returns the same value for its every occurrence within the same query. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[currentTime] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[currentTimeFunction] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[curTimeFunction] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[currentTimeFunctionPrecision] -------------------------------------------------- Typically, this function is used for relative date/time filtering: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[filterCurrentTime] -------------------------------------------------- @@ -221,17 +221,17 @@ meaning a milliseconds precision current date/time will be returned. This method always returns the same value for its every occurrence within the same query. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[curTs] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[curTsFunction] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[curTsFunctionPrecision] -------------------------------------------------- @@ -239,7 +239,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[curTsFunctionPrecision] Typically, this function (as well as its twin <> function is used for relative date/time filtering: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[filterNow] -------------------------------------------------- @@ -267,7 +267,7 @@ DAY_OF_MONTH(datetime_exp) <1> Extract the day of the month from a date/datetime. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dayOfMonth] -------------------------------------------------- @@ -291,7 +291,7 @@ DAY_OF_WEEK(datetime_exp) <1> Extract the day of the week from a date/datetime. Sunday is `1`, Monday is `2`, etc. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dayOfWeek] -------------------------------------------------- @@ -315,7 +315,7 @@ DAY_OF_YEAR(datetime_exp) <1> Extract the day of the year from a date/datetime. 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dayOfYear] -------------------------------------------------- @@ -339,7 +339,7 @@ DAY_NAME(datetime_exp) <1> Extract the day of the week from a date/datetime in text format (`Monday`, `Tuesday`...). -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dayName] -------------------------------------------------- @@ -363,7 +363,7 @@ HOUR_OF_DAY(datetime_exp) <1> Extract the hour of the day from a date/datetime. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[hourOfDay] -------------------------------------------------- @@ -388,7 +388,7 @@ ISO_DAY_OF_WEEK(datetime_exp) <1> Extract the day of the week from a date/datetime, following the https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. Monday is `1`, Tuesday is `2`, etc. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[isoDayOfWeek] -------------------------------------------------- @@ -413,7 +413,7 @@ ISO_WEEK_OF_YEAR(datetime_exp) <1> Extract the week of the year from a date/datetime, following https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. The first week of a year is the first week with a majority (4 or more) of its days in January. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[isoWeekOfYear] -------------------------------------------------- @@ -437,7 +437,7 @@ MINUTE_OF_DAY(datetime_exp) <1> Extract the minute of the day from a date/datetime. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[minuteOfDay] -------------------------------------------------- @@ -461,7 +461,7 @@ MINUTE_OF_HOUR(datetime_exp) <1> Extract the minute of the hour from a date/datetime. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[minuteOfHour] -------------------------------------------------- @@ -485,7 +485,7 @@ MONTH(datetime_exp) <1> Extract the month of the year from a date/datetime. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[monthOfYear] -------------------------------------------------- @@ -509,7 +509,7 @@ MONTH_NAME(datetime_exp) <1> Extract the month from a date/datetime in text format (`January`, `February`...). 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[monthName] -------------------------------------------------- @@ -533,7 +533,7 @@ This function offers the same functionality as <> function is used for relative date/time filtering: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[filterNow] -------------------------------------------------- @@ -565,7 +565,7 @@ SECOND_OF_MINUTE(datetime_exp) <1> Extract the second of the minute from a date/datetime. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[secondOfMinute] -------------------------------------------------- @@ -589,7 +589,7 @@ QUARTER(datetime_exp) <1> Extract the year quarter the date/datetime falls in. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[quarter] -------------------------------------------------- @@ -613,7 +613,7 @@ This function offers the same functionality as <> function is used for relative date filtering: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[filterToday] -------------------------------------------------- @@ -645,7 +645,7 @@ WEEK_OF_YEAR(datetime_exp) <1> Extract the week of the year from a date/datetime. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[weekOfYear] -------------------------------------------------- @@ -669,7 +669,7 @@ YEAR(datetime_exp) <1> Extract the year from a date/datetime. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[year] -------------------------------------------------- @@ -697,14 +697,14 @@ EXTRACT( Extract fields from a date/datetime by specifying the name of a <>. 
The following -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[extractDayOfYear] -------------------------------------------------- is the equivalent to -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[dayOfYear] -------------------------------------------------- diff --git a/docs/reference/sql/functions/grouping.asciidoc b/docs/reference/sql/functions/grouping.asciidoc index 0a498a1aace..6f2f5a1b6e4 100644 --- a/docs/reference/sql/functions/grouping.asciidoc +++ b/docs/reference/sql/functions/grouping.asciidoc @@ -44,14 +44,14 @@ NOTE:: The histogram in SQL does *NOT* return empty buckets for missing interval `Histogram` can be applied on either numeric fields: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[histogramNumeric] ---- or date/time fields: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[histogramDateTime] ---- @@ -59,14 +59,14 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[histogramDateTime] Expressions inside the histogram are also supported as long as the return type is numeric: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[histogramNumericExpression] ---- Do note that histograms (and grouping functions in general) allow custom expressions but cannot have any functions applied to them in the `GROUP BY`. In other words, the following statement is *NOT* allowed: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[expressionOnHistogramNotAllowed] ---- @@ -75,7 +75,7 @@ as it requires two groupings (one for histogram followed by a second for applyin Instead one can rewrite the query to move the expression on the histogram _inside_ of it: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[histogramDateTimeExpression] ---- diff --git a/docs/reference/sql/functions/like-rlike.asciidoc b/docs/reference/sql/functions/like-rlike.asciidoc index 73212bc1135..2d5ef0b62f9 100644 --- a/docs/reference/sql/functions/like-rlike.asciidoc +++ b/docs/reference/sql/functions/like-rlike.asciidoc @@ -38,7 +38,7 @@ with the `LIKE` operator: The percent sign represents zero, one or multiple characters. The underscore represents a single number or character. These symbols can be used in combinations. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[simpleLike] ---- @@ -75,7 +75,7 @@ and underscore (`_`); the pattern in this case is a regular expression which all For more details about the regular expressions syntax, https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/regex/Pattern.html[Java's Pattern class javadoc] is a good starting point. 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[simpleRLike] ---- diff --git a/docs/reference/sql/functions/operators.asciidoc b/docs/reference/sql/functions/operators.asciidoc index 4b7e8990290..02841c84b58 100644 --- a/docs/reference/sql/functions/operators.asciidoc +++ b/docs/reference/sql/functions/operators.asciidoc @@ -8,7 +8,7 @@ Boolean operator for comparing against one or multiple expressions. [[sql-operators-equality]] ==== `Equality (=)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldEquality] -------------------------------------------------- @@ -16,12 +16,12 @@ include-tagged::{sql-specs}/filter.sql-spec[whereFieldEquality] [[sql-operators-null-safe-equality]] ==== `Null safe Equality (<=>)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[nullEqualsCompareWithNull] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[nullEqualsCompareTwoNulls] -------------------------------------------------- @@ -29,7 +29,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[nullEqualsCompareTwoNulls] [[sql-operators-inequality]] ==== `Inequality (<> or !=)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldNonEquality] -------------------------------------------------- @@ -37,7 +37,7 @@ include-tagged::{sql-specs}/filter.sql-spec[whereFieldNonEquality] [[sql-operators-comparison]] ==== `Comparison (<, <=, >, >=)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldLessThan] -------------------------------------------------- @@ -45,7 +45,7 @@ include-tagged::{sql-specs}/filter.sql-spec[whereFieldLessThan] [[sql-operators-between]] ==== `BETWEEN` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereBetween] -------------------------------------------------- @@ -53,7 +53,7 @@ include-tagged::{sql-specs}/filter.sql-spec[whereBetween] [[sql-operators-is-null]] ==== `IS NULL/IS NOT NULL` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereIsNotNullAndIsNull] -------------------------------------------------- @@ -61,7 +61,7 @@ include-tagged::{sql-specs}/filter.sql-spec[whereIsNotNullAndIsNull] [[sql-operators-in]] ==== `IN (, , ...)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereWithInAndMultipleValues] -------------------------------------------------- @@ -74,7 +74,7 @@ Boolean operator for evaluating one or two expressions. 
[[sql-operators-and]] ==== `AND` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldAndComparison] -------------------------------------------------- @@ -82,7 +82,7 @@ include-tagged::{sql-specs}/filter.sql-spec[whereFieldAndComparison] [[sql-operators-or]] ==== `OR` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldOrComparison] -------------------------------------------------- @@ -90,7 +90,7 @@ include-tagged::{sql-specs}/filter.sql-spec[whereFieldOrComparison] [[sql-operators-not]] ==== `NOT` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldEqualityNot] -------------------------------------------------- @@ -104,7 +104,7 @@ The result is a value of numeric type. [[sql-operators-plus]] ==== `Add (+)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[plus] -------------------------------------------------- @@ -112,7 +112,7 @@ include-tagged::{sql-specs}/arithmetic.sql-spec[plus] [[sql-operators-subtract]] ==== `Subtract (infix -)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[minus] -------------------------------------------------- @@ -120,7 +120,7 @@ include-tagged::{sql-specs}/arithmetic.sql-spec[minus] [[sql-operators-negate]] ==== `Negate (unary -)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[unaryMinus] -------------------------------------------------- @@ -128,7 +128,7 @@ include-tagged::{sql-specs}/arithmetic.sql-spec[unaryMinus] [[sql-operators-multiply]] ==== `Multiply (*)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[multiply] -------------------------------------------------- @@ -136,7 +136,7 @@ include-tagged::{sql-specs}/arithmetic.sql-spec[multiply] [[sql-operators-divide]] ==== `Divide (/)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[divide] -------------------------------------------------- @@ -144,7 +144,7 @@ include-tagged::{sql-specs}/arithmetic.sql-spec[divide] [[sql-operators-remainder]] ==== `Modulo or Remainder(%)` -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[mod] -------------------------------------------------- @@ -157,7 +157,7 @@ include-tagged::{sql-specs}/arithmetic.sql-spec[mod] `::` provides an alternative syntax to the <> function. 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[conversionStringToLongCastOperator] -------------------------------------------------- diff --git a/docs/reference/sql/functions/search.asciidoc b/docs/reference/sql/functions/search.asciidoc index 6990f6669d6..0036523c120 100644 --- a/docs/reference/sql/functions/search.asciidoc +++ b/docs/reference/sql/functions/search.asciidoc @@ -33,7 +33,7 @@ and <> {es} queries. The first parameter is the field or fields to match against. In case it receives one value only, {es-sql} will use a `match` query to perform the search: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[simpleMatch] ---- @@ -41,7 +41,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[simpleMatch] However, it can also receive a list of fields and their corresponding optional `boost` value. In this case, {es-sql} will use a `multi_match` query to match the documents: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[multiFieldsMatch] ---- @@ -53,7 +53,7 @@ the final score than the `author` field when searching for `frank dune` text in Both options above can be used in combination with the optional third parameter of the `MATCH()` predicate, where one can specify additional configuration parameters (separated by semicolon `;`) for either `match` or `multi_match` queries. For example: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[optionalParamsForMatch] ---- @@ -95,14 +95,14 @@ Just like `MATCH`, `QUERY` is a full-text search predicate that gives the user c The first parameter is basically the input that will be passed as is to the `query_string` query, which means that anything that `query_string` accepts in its `query` field can be used here as well: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[simpleQueryQuery] ---- A more advanced example, showing more of the features that `query_string` supports, of course possible with {es-sql}: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[advancedQueryQuery] ---- @@ -113,7 +113,7 @@ regex and fuzziness queries for the `name` field. If one needs to customize various configuration options that `query_string` exposes, this can be done using the second _optional_ parameter. 
Multiple settings can be specified separated by a semicolon `;`: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[optionalParameterQuery] ---- @@ -149,14 +149,14 @@ combined using the same rules as {es}'s Typically `SCORE` is used for ordering the results of a query based on their relevance: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[orderByScore] ---- However, it is perfectly fine to return the score without sorting by it: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[scoreWithMatch] ---- diff --git a/docs/reference/sql/functions/string.asciidoc b/docs/reference/sql/functions/string.asciidoc index 7acc3587635..a82ac66adce 100644 --- a/docs/reference/sql/functions/string.asciidoc +++ b/docs/reference/sql/functions/string.asciidoc @@ -24,7 +24,7 @@ ASCII(string_exp) <1> Returns the ASCII code value of the leftmost character of `string_exp` as an integer. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringAscii] -------------------------------------------------- @@ -47,7 +47,7 @@ BIT_LENGTH(string_exp) <1> Returns the length in bits of the `string_exp` input expression. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringBitLength] -------------------------------------------------- @@ -70,7 +70,7 @@ CHAR(code) <1> Returns the character that has the ASCII code value specified by the numeric input. The value should be between 0 and 255; otherwise, the return value is data source–dependent. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringChar] -------------------------------------------------- @@ -93,7 +93,7 @@ CHAR_LENGTH(string_exp) <1> Returns the length in characters of the input, if the string expression is of a character data type; otherwise, returns the length in bytes of the string expression (the smallest integer not less than the number of bits divided by 8). -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringCharLength] -------------------------------------------------- @@ -119,7 +119,7 @@ CONCAT( Returns a character string that is the result of concatenating `string_exp1` to `string_exp2`. If one of the string is `NULL`, the other string will be returned. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringConcat] -------------------------------------------------- @@ -149,7 +149,7 @@ INSERT( Returns a string where `length` characters have been deleted from `source`, beginning at `start`, and where `replacement` has been inserted into `source`, beginning at `start`. 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringInsert] -------------------------------------------------- @@ -172,7 +172,7 @@ LCASE(string_exp) <1> Returns a string equal to that in `string_exp`, with all uppercase characters converted to lowercase. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringLCase] -------------------------------------------------- @@ -198,7 +198,7 @@ LEFT( Returns the leftmost count characters of `string_exp`. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringLeft] -------------------------------------------------- @@ -221,7 +221,7 @@ LENGTH(string_exp) <1> Returns the number of characters in `string_exp`, excluding trailing blanks. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringLength] -------------------------------------------------- @@ -250,12 +250,12 @@ LOCATE( Returns the starting position of the first occurrence of `pattern` within `source`. The search for the first occurrence of `pattern` begins with the first character position in `source` unless the optional argument, `start`, is specified. If `start` is specified, the search begins with the character position indicated by the value of `start`. The first character position in `source` is indicated by the value 1. If `pattern` is not found within `source`, the value 0 is returned. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringLocateWoStart] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringLocateWithStart] -------------------------------------------------- @@ -278,7 +278,7 @@ LTRIM(string_exp) <1> Returns the characters of `string_exp`, with leading blanks removed. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringLTrim] -------------------------------------------------- @@ -301,7 +301,7 @@ OCTET_LENGTH(string_exp) <1> Returns the length in bytes of the `string_exp` input expression. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringOctetLength] -------------------------------------------------- @@ -327,7 +327,7 @@ POSITION( Returns the position of the `string_exp1` in `string_exp2`. The result is an exact numeric. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringPosition] -------------------------------------------------- @@ -353,7 +353,7 @@ REPEAT( Returns a character string composed of `string_exp` repeated `count` times. 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringRepeat] -------------------------------------------------- @@ -381,7 +381,7 @@ REPLACE( Search `source` for occurrences of `pattern`, and replace with `replacement`. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringReplace] -------------------------------------------------- @@ -407,7 +407,7 @@ RIGHT( Returns the rightmost count characters of `string_exp`. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringRight] -------------------------------------------------- @@ -430,7 +430,7 @@ RTRIM(string_exp) <1> Returns the characters of `string_exp` with trailing blanks removed. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringRTrim] -------------------------------------------------- @@ -453,7 +453,7 @@ SPACE(count) <1> Returns a character string consisting of `count` spaces. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringSpace] -------------------------------------------------- @@ -481,7 +481,7 @@ SUBSTRING( Returns a character string that is derived from `source`, beginning at the character position specified by `start` for `length` characters. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringSubString] -------------------------------------------------- @@ -504,7 +504,7 @@ UCASE(string_exp) <1> Returns a string equal to that of the input, with all lowercase characters converted to uppercase. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringUCase] -------------------------------------------------- diff --git a/docs/reference/sql/functions/system.asciidoc b/docs/reference/sql/functions/system.asciidoc index dfca7d526d3..b2d604728c1 100644 --- a/docs/reference/sql/functions/system.asciidoc +++ b/docs/reference/sql/functions/system.asciidoc @@ -24,7 +24,7 @@ Returns the name of the database being queried. In the case of Elasticsearch SQL is the name of the Elasticsearch cluster. This function should always return a non-null value. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[database] -------------------------------------------------- @@ -46,7 +46,7 @@ USER() Returns the username of the authenticated user executing the query. This function can return `null` in case {stack-ov}/elasticsearch-security.html[Security] is disabled. 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[user] -------------------------------------------------- diff --git a/docs/reference/sql/functions/type-conversion.asciidoc b/docs/reference/sql/functions/type-conversion.asciidoc index 7f8488be40f..c6c76130551 100644 --- a/docs/reference/sql/functions/type-conversion.asciidoc +++ b/docs/reference/sql/functions/type-conversion.asciidoc @@ -25,17 +25,17 @@ Casts the result of the given expression to the target <> with slightly differen Moreover, apart from the standard <> it supports the corresponding https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/explicit-data-type-conversion-function?view=sql-server-2017[ODBC data types]. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[conversionStringToIntConvertODBCDataType] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[conversionStringToIntConvertESDataType] ---- diff --git a/docs/reference/sql/language/indices.asciidoc b/docs/reference/sql/language/indices.asciidoc index 82c7f30fb04..8f48177ce03 100644 --- a/docs/reference/sql/language/indices.asciidoc +++ b/docs/reference/sql/language/indices.asciidoc @@ -14,7 +14,7 @@ is supported _as long_ as it is quoted or escaped as a table identifier. For example: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesEsMultiIndex] ---- @@ -28,7 +28,7 @@ The same kind of patterns can also be used to query multiple indices or tables. For example: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[fromTablePatternQuoted] ---- @@ -44,7 +44,7 @@ or multiple `%` characters. Using `SHOW TABLES` command again: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesLikeWildcard] ---- @@ -53,7 +53,7 @@ The pattern matches all tables that start with `emp`. This command supports _escaping_ as well, for example: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesLikeEscape] ---- @@ -101,13 +101,13 @@ Set to `true` properties `index_include_frozen` in the <> or `index.in dedicated keyword:: Explicitly perform the inclusion through the dedicated `FROZEN` keyword in the `FROM` clause or `INCLUDE FROZEN` in the `SHOW` commands: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesIncludeFrozen] ---- -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[fromTableIncludeFrozen] ---- diff --git a/docs/reference/sql/language/syntax/commands/describe-table.asciidoc b/docs/reference/sql/language/syntax/commands/describe-table.asciidoc index da02f1fa238..9aad578da47 100644 --- a/docs/reference/sql/language/syntax/commands/describe-table.asciidoc +++ b/docs/reference/sql/language/syntax/commands/describe-table.asciidoc @@ -30,7 +30,7 @@ DESC `DESC` and `DESCRIBE` are aliases to <>. 
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[describeTable] ---- diff --git a/docs/reference/sql/language/syntax/commands/select.asciidoc b/docs/reference/sql/language/syntax/commands/select.asciidoc index 08ebe0ae964..0a4922a3cff 100644 --- a/docs/reference/sql/language/syntax/commands/select.asciidoc +++ b/docs/reference/sql/language/syntax/commands/select.asciidoc @@ -36,7 +36,7 @@ The general execution of `SELECT` is as follows: As with a table, every output column of a `SELECT` has a name which can be either specified per column through the `AS` keyword : -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[selectColumnAlias] ---- @@ -46,14 +46,14 @@ which is why it is recommended to specify it. assigned by {es-sql} if no name is given: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[selectInline] ---- or if it's a simple column reference, use its name as the column name: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[selectColumn] ---- @@ -63,7 +63,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[selectColumn] To select all the columns in the source, one can use `*`: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[wildcardWithOrder] ---- @@ -89,14 +89,14 @@ Represents the name (optionally qualified) of an existing table, either a concre If the table name contains special SQL characters (such as `.`,`-`,`*`,etc...) use double quotes to escape them: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[fromTableQuoted] ---- The name can be a <> pointing to multiple indices (likely requiring quoting as mentioned above) with the restriction that *all* resolved concrete tables have **exact mapping**. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[fromTablePatternQuoted] ---- @@ -104,7 +104,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[fromTablePatternQuoted] `alias`:: A substitute name for the `FROM` item containing the alias. An alias is used for brevity or to eliminate ambiguity. When an alias is provided, it completely hides the actual name of the table and must be used in its place. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[fromTableAlias] ---- @@ -125,7 +125,7 @@ where: Represents an expression that evaluates to a `boolean`. Only the rows that match the condition (to `true`) are returned. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[basicWhere] ---- @@ -148,34 +148,34 @@ Represents an expression on which rows are being grouped _on_. 
It can be a colum A common, group by column name: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByColumn] ---- Grouping by output ordinal: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByOrdinal] ---- Grouping by alias: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByAlias] ---- And grouping by column expression (typically used along-side an alias): -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByExpression] ---- Or a mixture of the above: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByMulti] ---- @@ -185,21 +185,21 @@ When a `GROUP BY` clause is used in a `SELECT`, _all_ output expressions must be To wit: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByAndAgg] ---- Expressions over aggregates used in output: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByAndAggExpression] ---- Multiple aggregates used: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByAndMultipleAggs] ---- @@ -216,14 +216,14 @@ As such, the query emits only a single row (as there is only a single group). A common example is counting the number of records: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByImplicitCount] ---- Of course, multiple aggregations can be applied: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByImplicitMultipleAggs] ---- @@ -249,14 +249,14 @@ Both `WHERE` and `HAVING` are used for filtering however there are several signi . `WHERE` works on individual *rows*, `HAVING` works on the *groups* created by ``GROUP BY`` . 
`WHERE` is evaluated *before* grouping, `HAVING` is evaluated *after* grouping -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByHaving] ---- Further more, one can use multiple aggregate expressions inside `HAVING` even ones that are not used in the output (`SELECT`): -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByHavingMultiple] ---- @@ -269,14 +269,14 @@ As such, the query emits only a single row (as there is only a single group) and In this example, `HAVING` matches: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByHavingImplicitMatch] ---- //However `HAVING` can also not match, in which case an empty result is returned: // -//["source","sql",subs="attributes,callouts,macros"] +//[source, sql] //---- //include-tagged::{sql-specs}/docs/docs.csv-spec[groupByHavingImplicitNoMatch] //---- @@ -304,7 +304,7 @@ IMPORTANT: When used along-side, `GROUP BY` expression can point _only_ to the c For example, the following query sorts by an arbitrary input field (`page_count`): -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[orderByBasic] ---- @@ -318,20 +318,20 @@ NOTE: With `GROUP BY`, make sure the ordering targets the resulting group - appl For example, to order groups simply indicate the grouping key: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[orderByGroup] ---- Multiple keys can be specified of course: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[groupByMulti] ---- Further more, it is possible to order groups based on aggregations of their values: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[orderByAgg] ---- @@ -352,7 +352,7 @@ combined using the same rules as {es}'s To sort based on the `score`, use the special function `SCORE()`: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[orderByScore] ---- @@ -360,7 +360,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[orderByScore] Note that you can return `SCORE()` by using a full-text search predicate in the `WHERE` clause. This is possible even if `SCORE()` is not used for sorting: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[orderByScoreWithMatch] ---- @@ -387,7 +387,7 @@ ALL:: indicates there is no limit and thus all results are being returned. To return -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[limitBasic] ---- diff --git a/docs/reference/sql/language/syntax/commands/show-columns.asciidoc b/docs/reference/sql/language/syntax/commands/show-columns.asciidoc index b21c02358e5..9cb90af6b65 100644 --- a/docs/reference/sql/language/syntax/commands/show-columns.asciidoc +++ b/docs/reference/sql/language/syntax/commands/show-columns.asciidoc @@ -21,7 +21,7 @@ patterns. List the columns in table and their data type (and other attributes). 
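The SQL reference hunks above pull their examples from `include-tagged::{sql-specs}` snippets, so the statements themselves are not visible in the patch. As a rough, self-contained sketch of the commands they describe, the snippet below runs `SHOW COLUMNS` and an aggregating `GROUP BY ... HAVING` query over the `emp` test table through the {es-sql} JDBC driver; the `jdbc:es://localhost:9200` URL and a reachable local cluster are assumptions made purely for illustration.

[source,java]
----
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class SqlDocsSketch {
    public static void main(String[] args) throws Exception {
        // Assumes a local node and the SQL JDBC driver on the classpath
        try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200");
             Statement st = con.createStatement()) {

            // List the columns of the emp table and their data types
            try (ResultSet rs = st.executeQuery("SHOW COLUMNS FROM emp")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1) + " : " + rs.getString(2));
                }
            }

            // Aliases declared in SELECT can be reused both in GROUP BY and in HAVING
            try (ResultSet rs = st.executeQuery(
                    "SELECT gender AS g, COUNT(*) AS c FROM emp GROUP BY g HAVING c > 10")) {
                while (rs.next()) {
                    System.out.println(rs.getString("g") + " -> " + rs.getLong("c"));
                }
            }
        }
    }
}
----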
-["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showColumns] ---- diff --git a/docs/reference/sql/language/syntax/commands/show-functions.asciidoc b/docs/reference/sql/language/syntax/commands/show-functions.asciidoc index 47c000e81d9..8689788867c 100644 --- a/docs/reference/sql/language/syntax/commands/show-functions.asciidoc +++ b/docs/reference/sql/language/syntax/commands/show-functions.asciidoc @@ -15,7 +15,7 @@ SHOW FUNCTIONS [LIKE pattern?]? <1> List all the SQL functions and their type. The `LIKE` clause can be used to restrict the list of names to the given pattern. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showFunctions] ---- @@ -23,25 +23,25 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[showFunctions] The list of functions returned can be customized based on the pattern. It can be an exact match: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showFunctionsLikeExact] ---- A wildcard for exactly one character: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showFunctionsLikeChar] ---- A wildcard matching zero or more characters: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showFunctionsLikeWildcard] ---- Or of course, a variation of the above: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showFunctionsWithPattern] ---- diff --git a/docs/reference/sql/language/syntax/commands/show-tables.asciidoc b/docs/reference/sql/language/syntax/commands/show-tables.asciidoc index 554819e24b1..d5a40337713 100644 --- a/docs/reference/sql/language/syntax/commands/show-tables.asciidoc +++ b/docs/reference/sql/language/syntax/commands/show-tables.asciidoc @@ -24,7 +24,7 @@ patterns. List the tables available to the current user and their type. -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTables] ---- @@ -32,7 +32,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[showTables] Match multiple indices by using {es} <> notation: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesEsMultiIndex] ---- @@ -40,26 +40,26 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesEsMultiIndex] One can also use the `LIKE` clause to restrict the list of names to the given pattern. 
The pattern can be an exact match: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesLikeExact] ---- Multiple chars: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesLikeWildcard] ---- A single char: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesLikeOneChar] ---- Or a mixture of single and multiple chars: -["source","sql",subs="attributes,callouts,macros"] +[source, sql] ---- include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesLikeMixed] ---- diff --git a/docs/reference/sql/security.asciidoc b/docs/reference/sql/security.asciidoc index ad946c33e2d..cbf41b46997 100644 --- a/docs/reference/sql/security.asciidoc +++ b/docs/reference/sql/security.asciidoc @@ -33,7 +33,7 @@ the API require `cluster:monitor/main`. The following example configures a role that can run SQL in JDBC querying the `test` and `bort` indices: -["source","yaml",subs="attributes,callouts,macros"] +[source, yaml] -------------------------------------------------- include-tagged::{sql-tests}/security/roles.yml[cli_drivers] -------------------------------------------------- diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index 936c7eef881..1cd3b92fece 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -643,9 +643,9 @@ M |57 ; groupByAndAggExpression -// tag::groupByAndAggExpression schema::g:s|salary:i -SELECT gender AS g, ROUND( (MIN(salary) / 100) ) AS salary FROM emp GROUP BY gender; +// tag::groupByAndAggExpression +SELECT gender AS g, ROUND((MIN(salary) / 100)) AS salary FROM emp GROUP BY gender; g | salary ---------------+--------------- From 3a00d08c5031ea193c484e8401500e50cc9d161c Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 31 May 2019 08:53:59 -0700 Subject: [PATCH 029/210] Clarify that inner_hits must be used to access nested fields. (#42724) This PR updates the docs for `docvalue_fields` and `stored_fields` to clarify that nested fields must be accessed through `inner_hits`. It also tweaks the nested fields documentation to make this point more visible. Addresses #23766. --- docs/reference/mapping/types/nested.asciidoc | 31 ++++++++++--------- .../search/request/docvalue-fields.asciidoc | 4 +++ .../search/request/stored-fields.asciidoc | 5 +++ 3 files changed, 25 insertions(+), 15 deletions(-) diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index de0f3f2a5f1..63bb4591369 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -159,6 +159,22 @@ Nested documents can be: * sorted with <>. * retrieved and highlighted with <>. +[IMPORTANT] +============================================= + +Because nested documents are indexed as separate documents, they can only be +accessed within the scope of the `nested` query, the +`nested`/`reverse_nested` aggregations, or <>. + +For instance, if a string field within a nested document has +<> set to `offsets` to allow use of the postings +during the highlighting, these offsets will not be available during the main highlighting +phase. Instead, highlighting needs to be performed via +<>. 
The same consideration applies when loading +fields during a search through <> +or <>. + +============================================= [[nested-params]] ==== Parameters for `nested` fields @@ -178,21 +194,6 @@ The following parameters are accepted by `nested` fields: may be added to an existing nested object. -[IMPORTANT] -============================================= - -Because nested documents are indexed as separate documents, they can only be -accessed within the scope of the `nested` query, the -`nested`/`reverse_nested` aggregations, or <>. - -For instance, if a string field within a nested document has -<> set to `offsets` to allow use of the postings -during the highlighting, these offsets will not be available during the main highlighting -phase. Instead, highlighting needs to be performed via -<>. - -============================================= - [float] === Limits on `nested` mappings and objects diff --git a/docs/reference/search/request/docvalue-fields.asciidoc b/docs/reference/search/request/docvalue-fields.asciidoc index 6697b5bb3e3..784cc940153 100644 --- a/docs/reference/search/request/docvalue-fields.asciidoc +++ b/docs/reference/search/request/docvalue-fields.asciidoc @@ -67,3 +67,7 @@ on their mappings: `long`, `double` and other numeric fields are formatted as numbers, `keyword` fields are formatted as strings, `date` fields are formatted with the configured `date` format, etc. +NOTE: On its own, `docvalue_fields` cannot be used to load fields in nested +objects -- if a field contains a nested object in its path, then no data will +be returned for that docvalue field. To access nested fields, `docvalue_fields` +must be used within an <> block. \ No newline at end of file diff --git a/docs/reference/search/request/stored-fields.asciidoc b/docs/reference/search/request/stored-fields.asciidoc index 195dc39f11e..b55e0fce457 100644 --- a/docs/reference/search/request/stored-fields.asciidoc +++ b/docs/reference/search/request/stored-fields.asciidoc @@ -49,6 +49,11 @@ Script fields can also be automatically detected and used as fields, so things like `_source.obj1.field1` can be used, though not recommended, as `obj1.field1` will work as well. +NOTE: On its own, `stored_fields` cannot be used to load fields in nested +objects -- if a field contains a nested object in its path, then no data will +be returned for that stored field. To access nested fields, `stored_fields` +must be used within an <> block. 
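To make the point of this change concrete on the client side, here is a minimal, hypothetical sketch using the high-level REST client: the doc value field is requested inside `inner_hits`, because asking for it at the top level of the search would return nothing for a nested field. The `blog` index and the nested `comments` field are assumptions for illustration, not part of this change.

[source,java]
----
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class NestedDocValueFieldsSketch {

    static SearchResponse search(RestHighLevelClient client) throws Exception {
        // Nested documents are indexed as separate documents, so the doc value field
        // is requested on the inner_hits section rather than at the top level.
        InnerHitBuilder innerHits = new InnerHitBuilder()
            .addDocValueField("comments.votes");            // assumed nested sub-field

        SearchSourceBuilder source = new SearchSourceBuilder()
            .query(QueryBuilders.nestedQuery("comments",
                    QueryBuilders.matchQuery("comments.text", "great"), ScoreMode.Avg)
                .innerHit(innerHits));

        return client.search(new SearchRequest("blog").source(source), RequestOptions.DEFAULT);
    }
}
----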
+ ==== Disable stored fields entirely To disable the stored fields (and metadata fields) entirely use: `_none_`: From 9fdae169ac4ede7d94d17bfd28c55964c16dc021 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 31 May 2019 16:34:16 -0700 Subject: [PATCH 030/210] Use an anonymous inner class instead of lambda for UP-TO-DATE support --- .../java/org/elasticsearch/gradle/JdkDownloadPlugin.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java index a408b66ec81..d4f0d9941da 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java @@ -23,6 +23,7 @@ import org.gradle.api.Action; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; import org.gradle.api.Project; +import org.gradle.api.Task; import org.gradle.api.UnknownTaskException; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.ConfigurationContainer; @@ -165,7 +166,12 @@ public class JdkDownloadPlugin implements Plugin { } String extractDir = rootProject.getBuildDir().toPath().resolve("jdks/openjdk-" + jdkVersion + "_" + platform).toString(); TaskProvider extractTask = rootProject.getTasks().register(extractTaskName, Copy.class, copyTask -> { - copyTask.doFirst(t -> rootProject.delete(extractDir)); + copyTask.doFirst(new Action() { + @Override + public void execute(Task t) { + rootProject.delete(extractDir); + } + }); copyTask.into(extractDir); copyTask.from(fileGetter, removeRootDir); }); From 929215c0d54c07bf0bd59f7ab66a08b250129977 Mon Sep 17 00:00:00 2001 From: Christian Kotzbauer Date: Sat, 1 Jun 2019 13:52:47 +0200 Subject: [PATCH 031/210] Update release-notes.asciidoc (#42779) --- docs/reference/release-notes.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index b912d7b69f1..87761711dfa 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -6,6 +6,7 @@ This section summarizes the changes in each release. +* <> * <> * <> * <> From 2129d066430700176631893067d6f09babe324db Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 3 Jun 2019 09:16:54 +0100 Subject: [PATCH 032/210] Create client-only AnalyzeRequest/AnalyzeResponse classes (#42197) This commit clones the existing AnalyzeRequest/AnalyzeResponse classes to the high-level rest client, and adjusts request converters to use these new classes. This is a prerequisite to removing the Streamable interface from the internal server version of these classes. 
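Based on the factory methods and documentation tests included in this commit, the intended usage of the new client-side classes looks roughly like the sketch below; the index, field and text values are illustrative only. Note that, unlike the server-side request, the client class is built through static factory methods rather than setters.

[source,java]
----
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;

public class AnalyzeClientSketch {

    static void analyze(RestHighLevelClient client) throws Exception {
        // Analyze text with a built-in analyzer; no index is required
        AnalyzeRequest global = AnalyzeRequest.withGlobalAnalyzer("english", "Some text to analyze");
        AnalyzeResponse response = client.indices().analyze(global, RequestOptions.DEFAULT);
        for (AnalyzeResponse.AnalyzeToken token : response.getTokens()) {
            System.out.println(token.getTerm() + " @ " + token.getPosition());
        }

        // Analyze text with the analyzer mapped to a field of an existing index
        AnalyzeRequest byField = AnalyzeRequest.withField("my_index", "my_field", "Some text to analyze");
        client.indices().analyze(byField, RequestOptions.DEFAULT);
    }
}
----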
--- .../elasticsearch/client/IndicesClient.java | 4 +- .../client/IndicesRequestConverters.java | 2 +- .../client/RequestConverters.java | 2 +- .../client/indices/AnalyzeRequest.java | 343 ++++++++ .../client/indices/AnalyzeResponse.java | 183 ++++ .../client/indices/DetailAnalyzeResponse.java | 214 +++++ .../elasticsearch/client/IndicesClientIT.java | 8 +- .../client/IndicesRequestConvertersTests.java | 12 +- .../client/RequestConvertersTests.java | 12 +- .../IndicesClientDocumentationIT.java | 59 +- .../indices/AnalyzeGlobalRequestTests.java | 69 ++ .../indices/AnalyzeIndexRequestTests.java | 73 ++ .../client/indices/AnalyzeRequestTests.java | 54 ++ .../client/indices/AnalyzeResponseTests.java | 174 ++++ .../high-level/indices/analyze.asciidoc | 10 +- .../admin/indices/analyze/AnalyzeAction.java | 824 +++++++++++++++++- .../admin/indices/analyze/AnalyzeRequest.java | 307 ------- .../analyze/AnalyzeRequestBuilder.java | 7 +- .../indices/analyze/AnalyzeResponse.java | 320 ------- .../analyze/DetailAnalyzeResponse.java | 400 --------- .../analyze/TransportAnalyzeAction.java | 80 +- .../client/IndicesAdminClient.java | 7 +- .../client/support/AbstractClient.java | 6 +- .../admin/indices/RestAnalyzeAction.java | 106 +-- .../action/IndicesRequestIT.java | 3 +- .../indices/TransportAnalyzeActionTests.java | 57 +- .../indices/analyze/AnalyzeRequestTests.java | 8 +- .../indices/analyze/AnalyzeResponseTests.java | 109 +-- .../indices/analyze/AnalyzeActionIT.java | 40 +- .../admin/indices/RestAnalyzeActionTests.java | 61 +- .../security/action/SecurityActionMapper.java | 5 +- .../action/SecurityActionMapperTests.java | 9 +- 32 files changed, 2129 insertions(+), 1439 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index cbb1d95feae..a5a57e4d6b8 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -22,8 +22,6 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; -import 
org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; @@ -47,6 +45,8 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryReques import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.core.ShardsAcknowledgedResponse; +import org.elasticsearch.client.indices.AnalyzeRequest; +import org.elasticsearch.client.indices.AnalyzeResponse; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.CreateIndexResponse; import org.elasticsearch.client.indices.FreezeIndexRequest; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java index 5cbab843138..7a67fe71348 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java @@ -26,7 +26,6 @@ import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -41,6 +40,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.client.indices.AnalyzeRequest; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.indices.GetFieldMappingsRequest; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 3ad72eedde9..5b183887bfd 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -33,7 +33,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.explain.ExplainRequest; @@ -52,6 +51,7 @@ import org.elasticsearch.action.update.UpdateRequest; import 
org.elasticsearch.client.core.CountRequest; import org.elasticsearch.client.core.MultiTermVectorsRequest; import org.elasticsearch.client.core.TermVectorsRequest; +import org.elasticsearch.client.indices.AnalyzeRequest; import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Nullable; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java new file mode 100644 index 00000000000..1aed59227e8 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java @@ -0,0 +1,343 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.indices; + +import org.elasticsearch.client.Validatable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * A request to analyze text + */ +public class AnalyzeRequest implements Validatable, ToXContentObject { + + private String index; + + private String[] text; + + private String analyzer; + + private NameOrDefinition tokenizer; + + private final List tokenFilters = new ArrayList<>(); + + private final List charFilters = new ArrayList<>(); + + private String field; + + private boolean explain = false; + + private String[] attributes = Strings.EMPTY_ARRAY; + + private String normalizer; + + /** + * Analyzes text using a global analyzer + */ + public static AnalyzeRequest withGlobalAnalyzer(String analyzer, String... 
text) { + return new AnalyzeRequest(null, analyzer, null, null, text); + } + + /** + * Analyzes text using a custom analyzer built from global components + */ + public static CustomAnalyzerBuilder buildCustomAnalyzer(String tokenizer) { + return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizer)); + } + + /** + * Analyzes text using a custom analyzer built from global components + */ + public static CustomAnalyzerBuilder buildCustomAnalyzer(Map tokenizerSettings) { + return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizerSettings)); + } + + /** + * Analyzes text using a custom analyzer built from components defined on an index + */ + public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, String tokenizer) { + return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizer)); + } + + /** + * Analyzes text using a custom analyzer built from components defined on an index + */ + public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, Map tokenizerSettings) { + return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizerSettings)); + } + + /** + * Analyzes text using a named analyzer on an index + */ + public static AnalyzeRequest withIndexAnalyzer(String index, String analyzer, String... text) { + return new AnalyzeRequest(index, analyzer, null, null, text); + } + + /** + * Analyzes text using the analyzer defined on a specific field within an index + */ + public static AnalyzeRequest withField(String index, String field, String... text) { + return new AnalyzeRequest(index, null, null, field, text); + } + + /** + * Analyzes text using a named normalizer on an index + */ + public static AnalyzeRequest withNormalizer(String index, String normalizer, String... text) { + return new AnalyzeRequest(index, null, normalizer, null, text); + } + + /** + * Analyzes text using a custom normalizer built from global components + */ + public static CustomAnalyzerBuilder buildCustomNormalizer() { + return new CustomAnalyzerBuilder(null, null); + } + + /** + * Analyzes text using a custom normalizer built from components defined on an index + */ + public static CustomAnalyzerBuilder buildCustomNormalizer(String index) { + return new CustomAnalyzerBuilder(index, null); + } + + /** + * Helper class to build custom analyzer definitions + */ + public static class CustomAnalyzerBuilder { + + final NameOrDefinition tokenizer; + final String index; + List charFilters = new ArrayList<>(); + List tokenFilters = new ArrayList<>(); + + CustomAnalyzerBuilder(String index, NameOrDefinition tokenizer) { + this.tokenizer = tokenizer; + this.index = index; + } + + public CustomAnalyzerBuilder addCharFilter(String name) { + charFilters.add(new NameOrDefinition(name)); + return this; + } + + public CustomAnalyzerBuilder addCharFilter(Map settings) { + charFilters.add(new NameOrDefinition(settings)); + return this; + } + + public CustomAnalyzerBuilder addTokenFilter(String name) { + tokenFilters.add(new NameOrDefinition(name)); + return this; + } + + public CustomAnalyzerBuilder addTokenFilter(Map settings) { + tokenFilters.add(new NameOrDefinition(settings)); + return this; + } + + public AnalyzeRequest build(String... text) { + return new AnalyzeRequest(index, tokenizer, charFilters, tokenFilters, text); + } + } + + private AnalyzeRequest(String index, String analyzer, String normalizer, String field, String... 
text) { + this.index = index; + this.analyzer = analyzer; + this.normalizer = normalizer; + this.field = field; + this.text = text; + } + + private AnalyzeRequest(String index, NameOrDefinition tokenizer, List charFilters, + List tokenFilters, String... text) { + this.index = index; + this.analyzer = null; + this.normalizer = null; + this.field = null; + this.tokenizer = tokenizer; + this.charFilters.addAll(charFilters); + this.tokenFilters.addAll(tokenFilters); + this.text = text; + } + + static class NameOrDefinition implements ToXContentFragment { + // exactly one of these two members is not null + public final String name; + public final Settings definition; + + NameOrDefinition(String name) { + this.name = Objects.requireNonNull(name); + this.definition = null; + } + + NameOrDefinition(Settings settings) { + this.name = null; + this.definition = Objects.requireNonNull(settings); + } + + NameOrDefinition(Map definition) { + this.name = null; + Objects.requireNonNull(definition); + try { + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + builder.map(definition); + this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build(); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to parse [" + definition + "]", e); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (definition == null) { + return builder.value(name); + } + builder.startObject(); + definition.toXContent(builder, params); + builder.endObject(); + return builder; + } + + } + + /** + * Returns the index that the request should be executed against, or {@code null} if + * no index is specified + */ + public String index() { + return this.index; + } + + /** + * Returns the text to be analyzed + */ + public String[] text() { + return this.text; + } + + /** + * Returns the named analyzer used for analysis, if defined + */ + public String analyzer() { + return this.analyzer; + } + + /** + * Returns the named tokenizer used for analysis, if defined + */ + public String normalizer() { + return this.normalizer; + } + + /** + * Returns a custom Tokenizer used for analysis, if defined + */ + public NameOrDefinition tokenizer() { + return this.tokenizer; + } + + /** + * Returns the custom token filters used for analysis, if defined + */ + public List tokenFilters() { + return this.tokenFilters; + } + + /** + * Returns the custom character filters used for analysis, if defined + */ + public List charFilters() { + return this.charFilters; + } + + /** + * Returns the field to take an Analyzer from, if defined + */ + public String field() { + return this.field; + } + + /** + * Set whether or not detailed explanations of analysis should be returned + */ + public AnalyzeRequest explain(boolean explain) { + this.explain = explain; + return this; + } + + public boolean explain() { + return this.explain; + } + + public AnalyzeRequest attributes(String... 
attributes) { + if (attributes == null) { + throw new IllegalArgumentException("attributes must not be null"); + } + this.attributes = attributes; + return this; + } + + public String[] attributes() { + return this.attributes; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("text", text); + if (Strings.isNullOrEmpty(analyzer) == false) { + builder.field("analyzer", analyzer); + } + if (tokenizer != null) { + builder.field("tokenizer", tokenizer); + } + if (tokenFilters.size() > 0) { + builder.field("filter", tokenFilters); + } + if (charFilters.size() > 0) { + builder.field("char_filter", charFilters); + } + if (Strings.isNullOrEmpty(field) == false) { + builder.field("field", field); + } + if (explain) { + builder.field("explain", true); + } + if (attributes.length > 0) { + builder.field("attributes", attributes); + } + if (Strings.isNullOrEmpty(normalizer) == false) { + builder.field("normalizer", normalizer); + } + return builder.endObject(); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java new file mode 100644 index 00000000000..aaba8653dee --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java @@ -0,0 +1,183 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.indices; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class AnalyzeResponse { + + private static final String TOKENS = "tokens"; + private static final String DETAIL = "detail"; + + public static class AnalyzeToken { + private String term; + private int startOffset; + private int endOffset; + private int position; + private int positionLength = 1; + private String type; + private final Map attributes = new HashMap<>(); + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeResponse.AnalyzeToken that = (AnalyzeResponse.AnalyzeToken) o; + return startOffset == that.startOffset && + endOffset == that.endOffset && + position == that.position && + positionLength == that.positionLength && + Objects.equals(term, that.term) && + Objects.equals(attributes, that.attributes) && + Objects.equals(type, that.type); + } + + @Override + public int hashCode() { + return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type); + } + + public String getTerm() { + return this.term; + } + + private void setTerm(String term) { + this.term = term; + } + + public int getStartOffset() { + return this.startOffset; + } + + private void setStartOffset(int startOffset) { + this.startOffset = startOffset; + } + + public int getEndOffset() { + return this.endOffset; + } + + private void setEndOffset(int endOffset) { + this.endOffset = endOffset; + } + + public int getPosition() { + return this.position; + } + + private void setPosition(int position) { + this.position = position; + } + + public int getPositionLength() { + return this.positionLength; + } + + private void setPositionLength(int positionLength) { + this.positionLength = positionLength; + } + + public String getType() { + return this.type; + } + + private void setType(String type) { + this.type = type; + } + + public Map getAttributes() { + return this.attributes; + } + + private void setAttribute(String key, Object value) { + this.attributes.put(key, value); + } + + private static final ObjectParser PARSER + = new ObjectParser<>("analyze_token", AnalyzeToken::setAttribute, AnalyzeToken::new); + static { + PARSER.declareString(AnalyzeToken::setTerm, new ParseField("token")); + PARSER.declareString(AnalyzeToken::setType, new ParseField("type")); + PARSER.declareInt(AnalyzeToken::setPosition, new ParseField("position")); + PARSER.declareInt(AnalyzeToken::setStartOffset, new ParseField("start_offset")); + PARSER.declareInt(AnalyzeToken::setEndOffset, new ParseField("end_offset")); + PARSER.declareInt(AnalyzeToken::setPositionLength, new ParseField("positionLength")); + } + + public static AnalyzeToken fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + } + + private final DetailAnalyzeResponse detail; + private final List tokens; + + private AnalyzeResponse(List tokens, DetailAnalyzeResponse detail) { + this.tokens = tokens; + this.detail = detail; + } + + public List getTokens() { + return this.tokens; + } + + public 
DetailAnalyzeResponse detail() { + return this.detail; + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("analyze_response", + true, args -> new AnalyzeResponse((List) args[0], (DetailAnalyzeResponse) args[1])); + + static { + PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeToken.PARSER, new ParseField(TOKENS)); + PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(DETAIL)); + } + + public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeResponse that = (AnalyzeResponse) o; + return Objects.equals(detail, that.detail) && + Objects.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + return Objects.hash(detail, tokens); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java new file mode 100644 index 00000000000..36cf8afad0d --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java @@ -0,0 +1,214 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.indices; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class DetailAnalyzeResponse { + + private final boolean customAnalyzer; + private final AnalyzeTokenList analyzer; + private final CharFilteredText[] charfilters; + private final AnalyzeTokenList tokenizer; + private final AnalyzeTokenList[] tokenfilters; + + private DetailAnalyzeResponse(boolean customAnalyzer, + AnalyzeTokenList analyzer, + List charfilters, + AnalyzeTokenList tokenizer, + List tokenfilters) { + this.customAnalyzer = customAnalyzer; + this.analyzer = analyzer; + this.charfilters = charfilters == null ? null : charfilters.toArray(new CharFilteredText[]{}); + this.tokenizer = tokenizer; + this.tokenfilters = tokenfilters == null ? 
null : tokenfilters.toArray(new AnalyzeTokenList[]{}); + } + + public AnalyzeTokenList analyzer() { + return this.analyzer; + } + + public CharFilteredText[] charfilters() { + return this.charfilters; + } + + public AnalyzeTokenList tokenizer() { + return tokenizer; + } + + public AnalyzeTokenList[] tokenfilters() { + return tokenfilters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DetailAnalyzeResponse that = (DetailAnalyzeResponse) o; + return customAnalyzer == that.customAnalyzer && + Objects.equals(analyzer, that.analyzer) && + Arrays.equals(charfilters, that.charfilters) && + Objects.equals(tokenizer, that.tokenizer) && + Arrays.equals(tokenfilters, that.tokenfilters); + } + + @Override + public int hashCode() { + int result = Objects.hash(customAnalyzer, analyzer, tokenizer); + result = 31 * result + Arrays.hashCode(charfilters); + result = 31 * result + Arrays.hashCode(tokenfilters); + return result; + } + + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("detail", + true, args -> new DetailAnalyzeResponse( + (boolean) args[0], + (AnalyzeTokenList) args[1], + (List)args[2], + (AnalyzeTokenList) args[3], + (List)args[4])); + + static { + PARSER.declareBoolean(constructorArg(), new ParseField("custom_analyzer")); + PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("analyzer")); + PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField("charfilters")); + PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("tokenizer")); + PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("tokenfilters")); + } + + public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public static class AnalyzeTokenList { + private final String name; + private final AnalyzeResponse.AnalyzeToken[] tokens; + + private static final String TOKENS = "tokens"; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeTokenList that = (AnalyzeTokenList) o; + return Objects.equals(name, that.name) && + Arrays.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(tokens); + return result; + } + + public AnalyzeTokenList(String name, List tokens) { + this.name = name; + this.tokens = tokens.toArray(new AnalyzeResponse.AnalyzeToken[]{}); + } + + public String getName() { + return name; + } + + public AnalyzeResponse.AnalyzeToken[] getTokens() { + return tokens; + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("token_list", + true, args -> new AnalyzeTokenList((String) args[0], + (List)args[1])); + + static { + PARSER.declareString(constructorArg(), new ParseField("name")); + PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), + new ParseField("tokens")); + } + + public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + } + + public static class CharFilteredText { + private final String name; + private final String[] texts; + + CharFilteredText(String name, 
String[] texts) { + this.name = name; + if (texts != null) { + this.texts = texts; + } else { + this.texts = Strings.EMPTY_ARRAY; + } + } + + public String getName() { + return name; + } + + public String[] getTexts() { + return texts; + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("char_filtered_text", + true, args -> new CharFilteredText((String) args[0], ((List) args[1]).toArray(new String[0]))); + + static { + PARSER.declareString(constructorArg(), new ParseField("name")); + PARSER.declareStringArray(constructorArg(), new ParseField("filtered_text")); + } + + public static CharFilteredText fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CharFilteredText that = (CharFilteredText) o; + return Objects.equals(name, that.name) && + Arrays.equals(texts, that.texts); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(texts); + return result; + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index d9adf61782b..458e6371010 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -28,8 +28,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; @@ -58,6 +56,8 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.core.ShardsAcknowledgedResponse; +import org.elasticsearch.client.indices.AnalyzeRequest; +import org.elasticsearch.client.indices.AnalyzeResponse; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.CreateIndexResponse; import org.elasticsearch.client.indices.FreezeIndexRequest; @@ -1852,12 +1852,12 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { RestHighLevelClient client = highLevelClient(); - AnalyzeRequest noindexRequest = new AnalyzeRequest().text("One two three").analyzer("english"); + AnalyzeRequest noindexRequest = AnalyzeRequest.withGlobalAnalyzer("english", "One two three"); AnalyzeResponse noindexResponse = execute(noindexRequest, client.indices()::analyze, client.indices()::analyzeAsync); assertThat(noindexResponse.getTokens(), hasSize(3)); - AnalyzeRequest detailsRequest = new AnalyzeRequest().text("One two three").analyzer("english").explain(true); + AnalyzeRequest detailsRequest = 
AnalyzeRequest.withGlobalAnalyzer("english", "One two three").explain(true); AnalyzeResponse detailsResponse = execute(detailsRequest, client.indices()::analyze, client.indices()::analyzeAsync); assertNotNull(detailsResponse.detail()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java index f7d5ac51a73..8f52dd7b00b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -45,6 +44,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.client.indices.AnalyzeRequest; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.GetFieldMappingsRequest; import org.elasticsearch.client.indices.GetIndexRequest; @@ -86,18 +86,14 @@ import static org.hamcrest.Matchers.nullValue; public class IndicesRequestConvertersTests extends ESTestCase { public void testAnalyzeRequest() throws Exception { - AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest() - .text("Here is some text") - .index("test_index") - .analyzer("test_analyzer"); + AnalyzeRequest indexAnalyzeRequest + = AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text"); Request request = IndicesRequestConverters.analyze(indexAnalyzeRequest); assertThat(request.getEndpoint(), equalTo("/test_index/_analyze")); RequestConvertersTests.assertToXContentBody(indexAnalyzeRequest, request.getEntity()); - AnalyzeRequest analyzeRequest = new AnalyzeRequest() - .text("more text") - .analyzer("test_analyzer"); + AnalyzeRequest analyzeRequest = AnalyzeRequest.withGlobalAnalyzer("test_analyzer", "more text"); assertThat(IndicesRequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze")); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 498da6d5642..e45bac9a3ea 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import 
org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest; @@ -57,6 +56,7 @@ import org.elasticsearch.client.RequestConverters.EndpointBuilder; import org.elasticsearch.client.core.CountRequest; import org.elasticsearch.client.core.MultiTermVectorsRequest; import org.elasticsearch.client.core.TermVectorsRequest; +import org.elasticsearch.client.indices.AnalyzeRequest; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -1643,18 +1643,14 @@ public class RequestConvertersTests extends ESTestCase { } public void testAnalyzeRequest() throws Exception { - AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest() - .text("Here is some text") - .index("test_index") - .analyzer("test_analyzer"); + AnalyzeRequest indexAnalyzeRequest + = AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text"); Request request = RequestConverters.analyze(indexAnalyzeRequest); assertThat(request.getEndpoint(), equalTo("/test_index/_analyze")); assertToXContentBody(indexAnalyzeRequest, request.getEntity()); - AnalyzeRequest analyzeRequest = new AnalyzeRequest() - .text("more text") - .analyzer("test_analyzer"); + AnalyzeRequest analyzeRequest = AnalyzeRequest.withGlobalAnalyzer("test_analyzer", "more text"); assertThat(RequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze")); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 14def60b277..8e0a3d2fd00 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -26,9 +26,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; -import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; @@ -62,8 +59,11 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.SyncedFlushResponse; import org.elasticsearch.client.core.ShardsAcknowledgedResponse; +import org.elasticsearch.client.indices.AnalyzeRequest; +import org.elasticsearch.client.indices.AnalyzeResponse; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.CreateIndexResponse; +import org.elasticsearch.client.indices.DetailAnalyzeResponse; import org.elasticsearch.client.indices.FreezeIndexRequest; import 
org.elasticsearch.client.indices.GetFieldMappingsRequest; import org.elasticsearch.client.indices.GetFieldMappingsResponse; @@ -2418,32 +2418,29 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase { // tag::analyze-builtin-request - AnalyzeRequest request = new AnalyzeRequest(); - request.text("Some text to analyze", "Some more text to analyze"); // <1> - request.analyzer("english"); // <2> + AnalyzeRequest request = AnalyzeRequest.withGlobalAnalyzer("english", // <1> + "Some text to analyze", "Some more text to analyze"); // <2> // end::analyze-builtin-request } { // tag::analyze-custom-request - AnalyzeRequest request = new AnalyzeRequest(); - request.text("Some text to analyze"); - request.addCharFilter("html_strip"); // <1> - request.tokenizer("standard"); // <2> - request.addTokenFilter("lowercase"); // <3> - Map stopFilter = new HashMap<>(); stopFilter.put("type", "stop"); - stopFilter.put("stopwords", new String[]{ "to" }); // <4> - request.addTokenFilter(stopFilter); // <5> + stopFilter.put("stopwords", new String[]{ "to" }); // <1> + AnalyzeRequest request = AnalyzeRequest.buildCustomAnalyzer("standard") // <2> + .addCharFilter("html_strip") // <3> + .addTokenFilter("lowercase") // <4> + .addTokenFilter(stopFilter) // <5> + .build("Some text to analyze"); // end::analyze-custom-request } { // tag::analyze-custom-normalizer-request - AnalyzeRequest request = new AnalyzeRequest(); - request.text("BaR"); - request.addTokenFilter("lowercase"); + AnalyzeRequest request = AnalyzeRequest.buildCustomNormalizer() + .addTokenFilter("lowercase") + .build("BaR"); // end::analyze-custom-normalizer-request // tag::analyze-request-explain @@ -2484,10 +2481,11 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase { // tag::analyze-index-request - AnalyzeRequest request = new AnalyzeRequest(); - request.index("my_index"); // <1> - request.analyzer("my_analyzer"); // <2> - request.text("some text to analyze"); + AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( + "my_index", // <1> + "my_analyzer", // <2> + "some text to analyze" + ); // end::analyze-index-request // tag::analyze-execute-listener @@ -2505,10 +2503,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::analyze-execute-listener // use a built-in analyzer in the test - request = new AnalyzeRequest(); - request.index("my_index"); - request.field("my_field"); - request.text("some text to analyze"); + request = AnalyzeRequest.withField("my_index", "my_field", "some text to analyze"); // Use a blocking listener in the test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); @@ -2522,19 +2517,17 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase { // tag::analyze-index-normalizer-request - AnalyzeRequest request = new AnalyzeRequest(); - request.index("my_index"); // <1> - request.normalizer("my_normalizer"); // <2> - request.text("some text to analyze"); + AnalyzeRequest request = AnalyzeRequest.withNormalizer( + "my_index", // <1> + "my_normalizer", // <2> + "some text to analyze" + ); // end::analyze-index-normalizer-request } { // tag::analyze-field-request - AnalyzeRequest request = new AnalyzeRequest(); - request.index("my_index"); - request.field("my_field"); - request.text("some text to analyze"); + AnalyzeRequest request = AnalyzeRequest.withField("my_index", "my_field", "some text to analyze"); // end::analyze-field-request } diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java new file mode 100644 index 00000000000..073ede5246f --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.indices; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class AnalyzeGlobalRequestTests extends AnalyzeRequestTests { + + private static final Map charFilterConfig = new HashMap<>(); + static { + charFilterConfig.put("type", "html_strip"); + } + + private static final Map tokenFilterConfig = new HashMap<>(); + static { + tokenFilterConfig.put("type", "synonym"); + } + + @Override + protected AnalyzeRequest createClientTestInstance() { + int option = random().nextInt(3); + switch (option) { + case 0: + return AnalyzeRequest.withGlobalAnalyzer("my_analyzer", "some text", "some more text"); + case 1: + return AnalyzeRequest.buildCustomAnalyzer("my_tokenizer") + .addCharFilter("my_char_filter") + .addCharFilter(charFilterConfig) + .addTokenFilter("my_token_filter") + .addTokenFilter(tokenFilterConfig) + .build("some text", "some more text"); + case 2: + return AnalyzeRequest.buildCustomNormalizer() + .addCharFilter("my_char_filter") + .addCharFilter(charFilterConfig) + .addTokenFilter("my_token_filter") + .addTokenFilter(tokenFilterConfig) + .build("some text", "some more text"); + } + throw new IllegalStateException("nextInt(3) has returned a value greater than 2"); + } + + @Override + protected AnalyzeAction.Request doParseToServerInstance(XContentParser parser) throws IOException { + return AnalyzeAction.Request.fromXContent(parser, null); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java new file mode 100644 index 00000000000..54d6e397e26 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.indices; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class AnalyzeIndexRequestTests extends AnalyzeRequestTests { + + private static final Map charFilterConfig = new HashMap<>(); + static { + charFilterConfig.put("type", "html_strip"); + } + + private static final Map tokenFilterConfig = new HashMap<>(); + static { + tokenFilterConfig.put("type", "synonym"); + } + + @Override + protected AnalyzeRequest createClientTestInstance() { + int option = random().nextInt(5); + switch (option) { + case 0: + return AnalyzeRequest.withField("index", "field", "some text", "some more text"); + case 1: + return AnalyzeRequest.withIndexAnalyzer("index", "my_analyzer", "some text", "some more text"); + case 2: + return AnalyzeRequest.withNormalizer("index", "my_normalizer", "text", "more text"); + case 3: + return AnalyzeRequest.buildCustomAnalyzer("index", "my_tokenizer") + .addCharFilter("my_char_filter") + .addCharFilter(charFilterConfig) + .addTokenFilter("my_token_filter") + .addTokenFilter(tokenFilterConfig) + .build("some text", "some more text"); + case 4: + return AnalyzeRequest.buildCustomNormalizer("index") + .addCharFilter("my_char_filter") + .addCharFilter(charFilterConfig) + .addTokenFilter("my_token_filter") + .addTokenFilter(tokenFilterConfig) + .build("some text", "some more text"); + } + throw new IllegalStateException("nextInt(5) has returned a value greater than 4"); + } + + @Override + protected AnalyzeAction.Request doParseToServerInstance(XContentParser parser) throws IOException { + return AnalyzeAction.Request.fromXContent(parser, "index"); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java new file mode 100644 index 00000000000..50a339fc805 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
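As an illustrative sketch (not part of this change), the custom-analysis entry points exercised by the new request test classes around this point can be combined as shown below. The tokenizer, filter and index names are placeholders, and the settings-map overload is assumed to accept a `Map<String, Object>`, as the documentation test updated later in this patch suggests.

[source,java]
--------------------------------------------------
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.client.indices.AnalyzeRequest;

public class CustomAnalysisRequestSketch {

    public static void main(String[] args) {
        // a custom token filter definition passed as a settings map (example values only)
        Map<String, Object> stopFilter = new HashMap<>();
        stopFilter.put("type", "stop");
        stopFilter.put("stopwords", new String[]{"to"});

        // ad-hoc analyzer: a tokenizer plus optional char filters and token filters
        AnalyzeRequest customAnalyzer = AnalyzeRequest.buildCustomAnalyzer("standard")
            .addCharFilter("html_strip")
            .addTokenFilter("lowercase")
            .addTokenFilter(stopFilter)
            .build("Some text to analyze");

        // ad-hoc normalizer: char filters and token filters only, no tokenizer
        AnalyzeRequest customNormalizer = AnalyzeRequest.buildCustomNormalizer()
            .addTokenFilter("lowercase")
            .build("BaR");

        // both builders also come in an index-scoped form that resolves named
        // components against the analysis settings of the given index
        AnalyzeRequest indexScoped = AnalyzeRequest.buildCustomAnalyzer("index", "my_tokenizer")
            .addTokenFilter("my_token_filter")
            .build("some text", "some more text");
    }
}
--------------------------------------------------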
+ */ + +package org.elasticsearch.client.indices; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.client.AbstractRequestTestCase; + +public abstract class AnalyzeRequestTests extends AbstractRequestTestCase { + + @Override + protected void assertInstances(AnalyzeAction.Request serverInstance, AnalyzeRequest clientTestInstance) { + assertEquals(serverInstance.index(), clientTestInstance.index()); + assertArrayEquals(serverInstance.text(), clientTestInstance.text()); + assertEquals(serverInstance.analyzer(), clientTestInstance.analyzer()); + assertEquals(serverInstance.normalizer(), clientTestInstance.normalizer()); + assertEquals(serverInstance.charFilters().size(), clientTestInstance.charFilters().size()); + for (int i = 0; i < serverInstance.charFilters().size(); i++) { + assertEquals(serverInstance.charFilters().get(i).name, clientTestInstance.charFilters().get(i).name); + assertEquals(serverInstance.charFilters().get(i).definition, clientTestInstance.charFilters().get(i).definition); + } + assertEquals(serverInstance.tokenFilters().size(), clientTestInstance.tokenFilters().size()); + for (int i = 0; i < serverInstance.tokenFilters().size(); i++) { + assertEquals(serverInstance.tokenFilters().get(i).name, clientTestInstance.tokenFilters().get(i).name); + assertEquals(serverInstance.tokenFilters().get(i).definition, clientTestInstance.tokenFilters().get(i).definition); + } + if (serverInstance.tokenizer() != null) { + assertEquals(serverInstance.tokenizer().name, clientTestInstance.tokenizer().name); + assertEquals(serverInstance.tokenizer().definition, clientTestInstance.tokenizer().definition); + } + else { + assertNull(clientTestInstance.tokenizer()); + } + assertEquals(serverInstance.field(), clientTestInstance.field()); + assertEquals(serverInstance.explain(), clientTestInstance.explain()); + assertArrayEquals(serverInstance.attributes(), clientTestInstance.attributes()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java new file mode 100644 index 00000000000..e29fa88d7fe --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java @@ -0,0 +1,174 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.indices; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.client.AbstractResponseTestCase; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class AnalyzeResponseTests extends AbstractResponseTestCase { + + @Override + protected AnalyzeAction.Response createServerTestInstance() { + int tokenCount = randomIntBetween(1, 30); + AnalyzeAction.AnalyzeToken[] tokens = new AnalyzeAction.AnalyzeToken[tokenCount]; + for (int i = 0; i < tokenCount; i++) { + tokens[i] = randomToken(); + } + if (randomBoolean()) { + AnalyzeAction.CharFilteredText[] charfilters = null; + AnalyzeAction.AnalyzeTokenList[] tokenfilters = null; + if (randomBoolean()) { + charfilters = new AnalyzeAction.CharFilteredText[]{ + new AnalyzeAction.CharFilteredText("my_charfilter", new String[]{"one two"}) + }; + } + if (randomBoolean()) { + tokenfilters = new AnalyzeAction.AnalyzeTokenList[]{ + new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_1", tokens), + new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_2", tokens) + }; + } + AnalyzeAction.DetailAnalyzeResponse dar = new AnalyzeAction.DetailAnalyzeResponse( + charfilters, + new AnalyzeAction.AnalyzeTokenList("my_tokenizer", tokens), + tokenfilters); + return new AnalyzeAction.Response(null, dar); + } + return new AnalyzeAction.Response(Arrays.asList(tokens), null); + } + + private AnalyzeAction.AnalyzeToken randomToken() { + String token = randomAlphaOfLengthBetween(1, 20); + int position = randomIntBetween(0, 1000); + int startOffset = randomIntBetween(0, 1000); + int endOffset = randomIntBetween(0, 1000); + int posLength = randomIntBetween(1, 5); + String type = randomAlphaOfLengthBetween(1, 20); + Map extras = new HashMap<>(); + if (randomBoolean()) { + int entryCount = randomInt(6); + for (int i = 0; i < entryCount; i++) { + switch (randomInt(6)) { + case 0: + case 1: + case 2: + case 3: + String key = randomAlphaOfLength(5); + String value = randomAlphaOfLength(10); + extras.put(key, value); + break; + case 4: + String objkey = randomAlphaOfLength(5); + Map obj = new HashMap<>(); + obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10)); + extras.put(objkey, obj); + break; + case 5: + String listkey = randomAlphaOfLength(5); + List list = new ArrayList<>(); + list.add(randomAlphaOfLength(4)); + list.add(randomAlphaOfLength(6)); + extras.put(listkey, list); + break; + } + } + } + return new AnalyzeAction.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras); + } + + @Override + protected AnalyzeResponse doParseToClientInstance(XContentParser parser) throws IOException { + return AnalyzeResponse.fromXContent(parser); + } + + @Override + protected void assertInstances(AnalyzeAction.Response serverTestInstance, AnalyzeResponse clientInstance) { + if (serverTestInstance.detail() != null) { + assertNotNull(clientInstance.detail()); + assertInstances(serverTestInstance.detail(), clientInstance.detail()); + } + else { + assertEquals(serverTestInstance.getTokens().size(), clientInstance.getTokens().size()); + for (int i = 0; i < serverTestInstance.getTokens().size(); i++) { + assertEqualTokens(serverTestInstance.getTokens().get(0), clientInstance.getTokens().get(0)); + } + } + } + + private static void assertEqualTokens(AnalyzeAction.AnalyzeToken serverToken, AnalyzeResponse.AnalyzeToken 
clientToken) { + assertEquals(serverToken.getTerm(), clientToken.getTerm()); + assertEquals(serverToken.getPosition(), clientToken.getPosition()); + assertEquals(serverToken.getPositionLength(), clientToken.getPositionLength()); + assertEquals(serverToken.getStartOffset(), clientToken.getStartOffset()); + assertEquals(serverToken.getEndOffset(), clientToken.getEndOffset()); + assertEquals(serverToken.getType(), clientToken.getType()); + assertEquals(serverToken.getAttributes(), clientToken.getAttributes()); + } + + private static void assertInstances(AnalyzeAction.DetailAnalyzeResponse serverResponse, DetailAnalyzeResponse clientResponse) { + assertInstances(serverResponse.analyzer(), clientResponse.analyzer()); + assertInstances(serverResponse.tokenizer(), clientResponse.tokenizer()); + if (serverResponse.tokenfilters() == null) { + assertNull(clientResponse.tokenfilters()); + } + else { + assertEquals(serverResponse.tokenfilters().length, clientResponse.tokenfilters().length); + for (int i = 0; i < serverResponse.tokenfilters().length; i++) { + assertInstances(serverResponse.tokenfilters()[i], clientResponse.tokenfilters()[i]); + } + } + if (serverResponse.charfilters() == null) { + assertNull(clientResponse.charfilters()); + } + else { + assertEquals(serverResponse.charfilters().length, clientResponse.charfilters().length); + for (int i = 0; i < serverResponse.charfilters().length; i++) { + assertInstances(serverResponse.charfilters()[i], clientResponse.charfilters()[i]); + } + } + } + + private static void assertInstances(AnalyzeAction.AnalyzeTokenList serverTokens, + DetailAnalyzeResponse.AnalyzeTokenList clientTokens) { + if (serverTokens == null) { + assertNull(clientTokens); + } + else { + assertEquals(serverTokens.getName(), clientTokens.getName()); + assertEquals(serverTokens.getTokens().length, clientTokens.getTokens().length); + for (int i = 0; i < serverTokens.getTokens().length; i++) { + assertEqualTokens(serverTokens.getTokens()[i], clientTokens.getTokens()[i]); + } + } + } + + private static void assertInstances(AnalyzeAction.CharFilteredText serverText, DetailAnalyzeResponse.CharFilteredText clientText) { + assertEquals(serverText.getName(), clientText.getName()); + assertArrayEquals(serverText.getTexts(), clientText.getTexts()); + } +} diff --git a/docs/java-rest/high-level/indices/analyze.asciidoc b/docs/java-rest/high-level/indices/analyze.asciidoc index 4978c9ebcca..9464394fd1e 100644 --- a/docs/java-rest/high-level/indices/analyze.asciidoc +++ b/docs/java-rest/high-level/indices/analyze.asciidoc @@ -19,18 +19,18 @@ The simplest version uses a built-in analyzer: --------------------------------------------------- include-tagged::{doc-tests-file}[{api}-builtin-request] --------------------------------------------------- -<1> The text to include. Multiple strings are treated as a multi-valued field -<2> A built-in analyzer +<1> A built-in analyzer +<2> The text to include. 
Multiple strings are treated as a multi-valued field You can configure a custom analyzer: ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- include-tagged::{doc-tests-file}[{api}-custom-request] --------------------------------------------------- -<1> Configure char filters +<1> Configuration for a custom tokenfilter <2> Configure the tokenizer -<3> Add a built-in tokenfilter -<4> Configuration for a custom tokenfilter +<3> Configure char filters +<4> Add a built-in tokenfilter <5> Add the custom tokenfilter You can also build a custom normalizer, by including only charfilters and diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index 3677cd6cb4e..65c54ce70d4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -20,9 +20,35 @@ package org.elasticsearch.action.admin.indices.analyze; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; -public class AnalyzeAction extends Action { +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class AnalyzeAction extends Action { public static final AnalyzeAction INSTANCE = new AnalyzeAction(); public static final String NAME = "indices:admin/analyze"; @@ -32,12 +58,802 @@ public class AnalyzeAction extends Action { } @Override - public Writeable.Reader getResponseReader() { - return AnalyzeResponse::new; + public Writeable.Reader getResponseReader() { + return Response::new; } @Override - public AnalyzeResponse newResponse() { + public Response newResponse() { throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } + + /** + * A request to analyze a text associated with a specific index. Allow to provide + * the actual analyzer name to perform the analysis with. 
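As an illustrative aside (not part of this change), the server-side `AnalyzeAction.Request` described by the Javadoc above is assembled through its fluent setters; the index and analyzer names below are placeholders.

[source,java]
--------------------------------------------------
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;

public class ServerAnalyzeRequestSketch {

    public static void main(String[] args) {
        // index-scoped request using a named analyzer; "my_index" and "my_analyzer" are placeholders
        AnalyzeAction.Request request = new AnalyzeAction.Request("my_index")
            .text("some text to analyze")
            .analyzer("my_analyzer")
            .explain(true);

        // validate() returns null for a well-formed request; it reports missing text,
        // a normalizer without an index, or a normalizer combined with a tokenizer/analyzer
        ActionRequestValidationException validationException = request.validate();
        if (validationException != null) {
            throw validationException;
        }
    }
}
--------------------------------------------------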
+ */ + public static class Request extends SingleShardRequest { + + private String[] text; + private String analyzer; + private NameOrDefinition tokenizer; + private final List tokenFilters = new ArrayList<>(); + private final List charFilters = new ArrayList<>(); + private String field; + private boolean explain = false; + private String[] attributes = Strings.EMPTY_ARRAY; + private String normalizer; + + public static class NameOrDefinition implements Writeable { + // exactly one of these two members is not null + public final String name; + public final Settings definition; + + NameOrDefinition(String name) { + this.name = Objects.requireNonNull(name); + this.definition = null; + } + + NameOrDefinition(Map definition) { + this.name = null; + Objects.requireNonNull(definition); + try { + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + builder.map(definition); + this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build(); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to parse [" + definition + "]", e); + } + } + + NameOrDefinition(StreamInput in) throws IOException { + name = in.readOptionalString(); + if (in.readBoolean()) { + definition = Settings.readSettingsFromStream(in); + } else { + definition = null; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(name); + boolean isNotNullDefinition = this.definition != null; + out.writeBoolean(isNotNullDefinition); + if (isNotNullDefinition) { + Settings.writeSettingsToStream(definition, out); + } + } + + public static NameOrDefinition fromXContent(XContentParser parser) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + return new NameOrDefinition(parser.text()); + } + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + return new NameOrDefinition(parser.map()); + } + throw new XContentParseException(parser.getTokenLocation(), + "Expected [VALUE_STRING] or [START_OBJECT], got " + parser.currentToken()); + } + + } + + public Request() { + } + + /** + * Constructs a new analyzer request for the provided index. + * + * @param index The text to analyze + */ + public Request(String index) { + this.index(index); + } + + public String[] text() { + return this.text; + } + + public Request text(String... 
text) { + this.text = text; + return this; + } + + public Request text(List text) { + this.text = text.toArray(new String[]{}); + return this; + } + + public Request analyzer(String analyzer) { + this.analyzer = analyzer; + return this; + } + + public String analyzer() { + return this.analyzer; + } + + public Request tokenizer(String tokenizer) { + this.tokenizer = new NameOrDefinition(tokenizer); + return this; + } + + public Request tokenizer(Map tokenizer) { + this.tokenizer = new NameOrDefinition(tokenizer); + return this; + } + + public void tokenizer(NameOrDefinition tokenizer) { + this.tokenizer = tokenizer; + } + + public NameOrDefinition tokenizer() { + return this.tokenizer; + } + + public Request addTokenFilter(String tokenFilter) { + this.tokenFilters.add(new NameOrDefinition(tokenFilter)); + return this; + } + + public Request addTokenFilter(Map tokenFilter) { + this.tokenFilters.add(new NameOrDefinition(tokenFilter)); + return this; + } + + public void setTokenFilters(List tokenFilters) { + this.tokenFilters.addAll(tokenFilters); + } + + public List tokenFilters() { + return this.tokenFilters; + } + + public Request addCharFilter(Map charFilter) { + this.charFilters.add(new NameOrDefinition(charFilter)); + return this; + } + + public Request addCharFilter(String charFilter) { + this.charFilters.add(new NameOrDefinition(charFilter)); + return this; + } + + public void setCharFilters(List charFilters) { + this.charFilters.addAll(charFilters); + } + + public List charFilters() { + return this.charFilters; + } + + public Request field(String field) { + this.field = field; + return this; + } + + public String field() { + return this.field; + } + + public Request explain(boolean explain) { + this.explain = explain; + return this; + } + + public boolean explain() { + return this.explain; + } + + public Request attributes(String... 
attributes) { + if (attributes == null) { + throw new IllegalArgumentException("attributes must not be null"); + } + this.attributes = attributes; + return this; + } + + public void attributes(List attributes) { + this.attributes = attributes.toArray(new String[]{}); + } + + public String[] attributes() { + return this.attributes; + } + + public String normalizer() { + return this.normalizer; + } + + public Request normalizer(String normalizer) { + this.normalizer = normalizer; + return this; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (text == null || text.length == 0) { + validationException = addValidationError("text is missing", validationException); + } + if ((index == null || index.length() == 0) && normalizer != null) { + validationException = addValidationError("index is required if normalizer is specified", validationException); + } + if (normalizer != null && (tokenizer != null || analyzer != null)) { + validationException + = addValidationError("tokenizer/analyze should be null if normalizer is specified", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + text = in.readStringArray(); + analyzer = in.readOptionalString(); + tokenizer = in.readOptionalWriteable(NameOrDefinition::new); + tokenFilters.addAll(in.readList(NameOrDefinition::new)); + charFilters.addAll(in.readList(NameOrDefinition::new)); + field = in.readOptionalString(); + explain = in.readBoolean(); + attributes = in.readStringArray(); + normalizer = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(text); + out.writeOptionalString(analyzer); + out.writeOptionalWriteable(tokenizer); + out.writeList(tokenFilters); + out.writeList(charFilters); + out.writeOptionalString(field); + out.writeBoolean(explain); + out.writeStringArray(attributes); + out.writeOptionalString(normalizer); + } + + public static Request fromXContent(XContentParser parser, String index) throws IOException { + Request request = new Request(index); + PARSER.parse(parser, request, null); + return request; + } + + private static final ObjectParser PARSER = new ObjectParser<>("analyze_request", null); + static { + PARSER.declareStringArray(Request::text, new ParseField("text")); + PARSER.declareString(Request::analyzer, new ParseField("analyzer")); + PARSER.declareField(Request::tokenizer, (p, c) -> NameOrDefinition.fromXContent(p), + new ParseField("tokenizer"), ObjectParser.ValueType.OBJECT_OR_STRING); + PARSER.declareObjectArray(Request::setTokenFilters, (p, c) -> NameOrDefinition.fromXContent(p), + new ParseField("filter")); + PARSER.declareObjectArray(Request::setCharFilters, (p, c) -> NameOrDefinition.fromXContent(p), + new ParseField("char_filter")); + PARSER.declareString(Request::field, new ParseField("field")); + PARSER.declareBoolean(Request::explain, new ParseField("explain")); + PARSER.declareStringArray(Request::attributes, new ParseField("attributes")); + PARSER.declareString(Request::normalizer, new ParseField("normalizer")); + } + + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final DetailAnalyzeResponse detail; + private final List tokens; + + public Response(List tokens, DetailAnalyzeResponse detail) { + this.tokens = tokens; + this.detail = detail; + } + + public 
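For illustration only (not part of this change), a request body in the shape accepted by the `ObjectParser` above can be turned back into an `AnalyzeAction.Request` through `fromXContent`. The JSON values and the fallback index name are placeholders, and the parser is created with an empty registry purely for the sketch.

[source,java]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

public class AnalyzeRequestParsingSketch {

    public static void main(String[] args) throws IOException {
        // a body using the fields declared on the parser:
        // text, analyzer, tokenizer, filter, char_filter, field, explain, attributes, normalizer
        String body = "{"
            + "\"text\": [\"Some text to analyze\"],"
            + "\"tokenizer\": \"standard\","
            + "\"filter\": [\"lowercase\"],"
            + "\"explain\": true"
            + "}";

        try (XContentParser parser = XContentType.JSON.xContent()
                 .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, body)) {
            AnalyzeAction.Request parsed = AnalyzeAction.Request.fromXContent(parser, "my_index");
            assert parsed.explain();
        }
    }
}
--------------------------------------------------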
Response(StreamInput in) throws IOException { + super.readFrom(in); + int size = in.readVInt(); + if (size > 0) { + tokens = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + tokens.add(new AnalyzeToken(in)); + } + } + else { + tokens = null; + } + detail = in.readOptionalWriteable(DetailAnalyzeResponse::new); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + public List getTokens() { + return this.tokens; + } + + public DetailAnalyzeResponse detail() { + return this.detail; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (tokens != null) { + builder.startArray(Fields.TOKENS); + for (AnalyzeToken token : tokens) { + token.toXContent(builder, params); + } + builder.endArray(); + } + + if (detail != null) { + builder.startObject(Fields.DETAIL); + detail.toXContent(builder, params); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + if (tokens != null) { + out.writeVInt(tokens.size()); + for (AnalyzeToken token : tokens) { + token.writeTo(out); + } + } else { + out.writeVInt(0); + } + out.writeOptionalWriteable(detail); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(detail, that.detail) && + Objects.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + return Objects.hash(detail, tokens); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + static final class Fields { + static final String TOKENS = "tokens"; + + static final String DETAIL = "detail"; + } + } + + public static class AnalyzeToken implements Writeable, ToXContentObject { + private final String term; + private final int startOffset; + private final int endOffset; + private final int position; + private final int positionLength; + private final Map attributes; + private final String type; + + static final String TOKEN = "token"; + static final String START_OFFSET = "start_offset"; + static final String END_OFFSET = "end_offset"; + static final String TYPE = "type"; + static final String POSITION = "position"; + static final String POSITION_LENGTH = "positionLength"; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeToken that = (AnalyzeToken) o; + return startOffset == that.startOffset && + endOffset == that.endOffset && + position == that.position && + positionLength == that.positionLength && + Objects.equals(term, that.term) && + Objects.equals(attributes, that.attributes) && + Objects.equals(type, that.type); + } + + @Override + public int hashCode() { + return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type); + } + + public AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength, + String type, Map attributes) { + this.term = term; + this.position = position; + this.startOffset = startOffset; + this.endOffset = endOffset; + this.positionLength = positionLength; + this.type = type; + this.attributes = attributes; + } + + AnalyzeToken(StreamInput in) throws IOException { + term = 
in.readString(); + startOffset = in.readInt(); + endOffset = in.readInt(); + position = in.readVInt(); + Integer len = in.readOptionalVInt(); + if (len != null) { + positionLength = len; + } else { + positionLength = 1; + } + type = in.readOptionalString(); + attributes = in.readMap(); + } + + public String getTerm() { + return this.term; + } + + public int getStartOffset() { + return this.startOffset; + } + + public int getEndOffset() { + return this.endOffset; + } + + public int getPosition() { + return this.position; + } + + public int getPositionLength() { + return this.positionLength; + } + + public String getType() { + return this.type; + } + + public Map getAttributes(){ + return this.attributes; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(TOKEN, term); + builder.field(START_OFFSET, startOffset); + builder.field(END_OFFSET, endOffset); + builder.field(TYPE, type); + builder.field(POSITION, position); + if (positionLength > 1) { + builder.field(POSITION_LENGTH, positionLength); + } + if (attributes != null && !attributes.isEmpty()) { + Map sortedAttributes = new TreeMap<>(attributes); + for (Map.Entry entity : sortedAttributes.entrySet()) { + builder.field(entity.getKey(), entity.getValue()); + } + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(term); + out.writeInt(startOffset); + out.writeInt(endOffset); + out.writeVInt(position); + out.writeOptionalVInt(positionLength > 1 ? positionLength : null); + out.writeOptionalString(type); + out.writeMapWithConsistentOrder(attributes); + } + } + + public static class DetailAnalyzeResponse implements Writeable, ToXContentFragment { + + private final boolean customAnalyzer; + private final AnalyzeTokenList analyzer; + private final CharFilteredText[] charfilters; + private final AnalyzeTokenList tokenizer; + private final AnalyzeTokenList[] tokenfilters; + + public DetailAnalyzeResponse(AnalyzeTokenList analyzer) { + this(false, analyzer, null, null, null); + } + + public DetailAnalyzeResponse(CharFilteredText[] charfilters, AnalyzeTokenList tokenizer, AnalyzeTokenList[] tokenfilters) { + this(true, null, charfilters, tokenizer, tokenfilters); + } + + DetailAnalyzeResponse(boolean customAnalyzer, + AnalyzeTokenList analyzer, + CharFilteredText[] charfilters, + AnalyzeTokenList tokenizer, + AnalyzeTokenList[] tokenfilters) { + this.customAnalyzer = customAnalyzer; + this.analyzer = analyzer; + this.charfilters = charfilters; + this.tokenizer = tokenizer; + this.tokenfilters = tokenfilters; + } + + DetailAnalyzeResponse(StreamInput in) throws IOException { + this.customAnalyzer = in.readBoolean(); + if (customAnalyzer) { + tokenizer = new AnalyzeTokenList(in); + int size = in.readVInt(); + if (size > 0) { + charfilters = new CharFilteredText[size]; + for (int i = 0; i < size; i++) { + charfilters[i] = new CharFilteredText(in); + } + } else { + charfilters = null; + } + size = in.readVInt(); + if (size > 0) { + tokenfilters = new AnalyzeTokenList[size]; + for (int i = 0; i < size; i++) { + tokenfilters[i] = new AnalyzeTokenList(in); + } + } else { + tokenfilters = null; + } + analyzer = null; + } else { + analyzer = new AnalyzeTokenList(in); + tokenfilters = null; + tokenizer = null; + charfilters = null; + } + } + + public AnalyzeTokenList analyzer() { + return this.analyzer; + } + + public CharFilteredText[] charfilters() { + return 
this.charfilters; + } + + public AnalyzeTokenList tokenizer() { + return tokenizer; + } + + public AnalyzeTokenList[] tokenfilters() { + return tokenfilters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DetailAnalyzeResponse that = (DetailAnalyzeResponse) o; + return customAnalyzer == that.customAnalyzer && + Objects.equals(analyzer, that.analyzer) && + Arrays.equals(charfilters, that.charfilters) && + Objects.equals(tokenizer, that.tokenizer) && + Arrays.equals(tokenfilters, that.tokenfilters); + } + + @Override + public int hashCode() { + int result = Objects.hash(customAnalyzer, analyzer, tokenizer); + result = 31 * result + Arrays.hashCode(charfilters); + result = 31 * result + Arrays.hashCode(tokenfilters); + return result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("custom_analyzer", customAnalyzer); + + if (analyzer != null) { + builder.startObject("analyzer"); + analyzer.toXContentWithoutObject(builder, params); + builder.endObject(); + } + + if (charfilters != null) { + builder.startArray("charfilters"); + for (CharFilteredText charfilter : charfilters) { + charfilter.toXContent(builder, params); + } + builder.endArray(); + } + + if (tokenizer != null) { + builder.startObject("tokenizer"); + tokenizer.toXContentWithoutObject(builder, params); + builder.endObject(); + } + + if (tokenfilters != null) { + builder.startArray("tokenfilters"); + for (AnalyzeTokenList tokenfilter : tokenfilters) { + tokenfilter.toXContent(builder, params); + } + builder.endArray(); + } + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(customAnalyzer); + if (customAnalyzer) { + tokenizer.writeTo(out); + if (charfilters != null) { + out.writeVInt(charfilters.length); + for (CharFilteredText charfilter : charfilters) { + charfilter.writeTo(out); + } + } else { + out.writeVInt(0); + } + if (tokenfilters != null) { + out.writeVInt(tokenfilters.length); + for (AnalyzeTokenList tokenfilter : tokenfilters) { + tokenfilter.writeTo(out); + } + } else { + out.writeVInt(0); + } + } else { + analyzer.writeTo(out); + } + } + } + + public static class AnalyzeTokenList implements Writeable, ToXContentObject { + private final String name; + private final AnalyzeToken[] tokens; + + static final String NAME = "name"; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeTokenList that = (AnalyzeTokenList) o; + return Objects.equals(name, that.name) && + Arrays.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(tokens); + return result; + } + + public AnalyzeTokenList(String name, AnalyzeToken[] tokens) { + this.name = name; + this.tokens = tokens; + } + + AnalyzeTokenList(StreamInput in) throws IOException { + name = in.readString(); + int size = in.readVInt(); + if (size > 0) { + tokens = new AnalyzeToken[size]; + for (int i = 0; i < size; i++) { + tokens[i] = new AnalyzeToken(in); + } + } + else { + tokens = null; + } + } + + public String getName() { + return name; + } + + public AnalyzeToken[] getTokens() { + return tokens; + } + + void toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException { + builder.field(NAME, this.name); + 
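As a usage sketch (not part of this change), the detail structure serialized here is read on the client side through the `DetailAnalyzeResponse` getters added in this patch; the `RestHighLevelClient` instance is assumed to exist and `english` stands in for any named analyzer.

[source,java]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;
import org.elasticsearch.client.indices.DetailAnalyzeResponse;

public class AnalyzeExplainSketch {

    static void printAnalyzerDetail(RestHighLevelClient client) throws IOException {
        AnalyzeRequest request = AnalyzeRequest.withGlobalAnalyzer("english", "One two three");
        request.explain(true);

        AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);

        // with a named analyzer the detail carries a single token list under analyzer();
        // a custom chain would instead populate charfilters(), tokenizer() and tokenfilters()
        DetailAnalyzeResponse detail = response.detail();
        if (detail != null && detail.analyzer() != null) {
            for (AnalyzeResponse.AnalyzeToken token : detail.analyzer().getTokens()) {
                System.out.println(detail.analyzer().getName()
                    + " -> " + token.getTerm() + " @ " + token.getPosition());
            }
        }
    }
}
--------------------------------------------------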
builder.startArray(Response.Fields.TOKENS); + if (tokens != null) { + for (AnalyzeToken token : tokens) { + token.toXContent(builder, params); + } + } + builder.endArray(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + toXContentWithoutObject(builder, params); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + if (tokens != null) { + out.writeVInt(tokens.length); + for (AnalyzeToken token : tokens) { + token.writeTo(out); + } + } else { + out.writeVInt(0); + } + } + } + + public static class CharFilteredText implements Writeable, ToXContentObject { + private final String name; + private final String[] texts; + + static final String NAME = "name"; + static final String FILTERED_TEXT = "filtered_text"; + + public CharFilteredText(String name, String[] texts) { + this.name = name; + if (texts != null) { + this.texts = texts; + } else { + this.texts = Strings.EMPTY_ARRAY; + } + } + + CharFilteredText(StreamInput in) throws IOException { + name = in.readString(); + texts = in.readStringArray(); + } + + public String getName() { + return name; + } + + public String[] getTexts() { + return texts; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(NAME, name); + builder.array(FILTERED_TEXT, texts); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeStringArray(texts); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CharFilteredText that = (CharFilteredText) o; + return Objects.equals(name, that.name) && + Arrays.equals(texts, that.texts); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(texts); + return result; + } + } + } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java deleted file mode 100644 index 09686025e9d..00000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
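The file being removed here is the transport-level request that high-level REST client callers previously built fluently. As a migration sketch (not part of this change), those call sites move to the static factories on `org.elasticsearch.client.indices.AnalyzeRequest`, matching the test updates earlier in the patch.

[source,java]
--------------------------------------------------
// Before (transport-level class removed in this patch):
//   AnalyzeRequest request = new AnalyzeRequest()
//       .text("Here is some text")
//       .index("test_index")
//       .analyzer("test_analyzer");
//
// After (high-level REST client):
import org.elasticsearch.client.indices.AnalyzeRequest;

public class AnalyzeRequestMigrationSketch {

    public static void main(String[] args) {
        AnalyzeRequest indexScoped =
            AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text");
        AnalyzeRequest global =
            AnalyzeRequest.withGlobalAnalyzer("test_analyzer", "more text");
    }
}
--------------------------------------------------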
- */ -package org.elasticsearch.action.admin.indices.analyze; - -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.single.shard.SingleShardRequest; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContentFragment; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.action.ValidateActions.addValidationError; - -/** - * A request to analyze a text associated with a specific index. Allow to provide - * the actual analyzer name to perform the analysis with. - */ -public class AnalyzeRequest extends SingleShardRequest implements ToXContentObject { - - private String[] text; - - private String analyzer; - - private NameOrDefinition tokenizer; - - private final List tokenFilters = new ArrayList<>(); - - private final List charFilters = new ArrayList<>(); - - private String field; - - private boolean explain = false; - - private String[] attributes = Strings.EMPTY_ARRAY; - - private String normalizer; - - public static class NameOrDefinition implements Writeable, ToXContentFragment { - // exactly one of these two members is not null - public final String name; - public final Settings definition; - - NameOrDefinition(String name) { - this.name = Objects.requireNonNull(name); - this.definition = null; - } - - NameOrDefinition(Map definition) { - this.name = null; - Objects.requireNonNull(definition); - try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - builder.map(definition); - this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build(); - } catch (IOException e) { - throw new IllegalArgumentException("Failed to parse [" + definition + "]", e); - } - } - - NameOrDefinition(StreamInput in) throws IOException { - name = in.readOptionalString(); - if (in.readBoolean()) { - definition = Settings.readSettingsFromStream(in); - } else { - definition = null; - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalString(name); - boolean isNotNullDefinition = this.definition != null; - out.writeBoolean(isNotNullDefinition); - if (isNotNullDefinition) { - Settings.writeSettingsToStream(definition, out); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (definition == null) { - return builder.value(name); - } - return definition.toXContent(builder, params); - } - - } - - public AnalyzeRequest() { - } - - /** - * Constructs a new analyzer request for the provided index. - * - * @param index The text to analyze - */ - public AnalyzeRequest(String index) { - this.index(index); - } - - public String[] text() { - return this.text; - } - - public AnalyzeRequest text(String... 
text) { - this.text = text; - return this; - } - - public AnalyzeRequest analyzer(String analyzer) { - this.analyzer = analyzer; - return this; - } - - public String analyzer() { - return this.analyzer; - } - - public AnalyzeRequest tokenizer(String tokenizer) { - this.tokenizer = new NameOrDefinition(tokenizer); - return this; - } - - public AnalyzeRequest tokenizer(Map tokenizer) { - this.tokenizer = new NameOrDefinition(tokenizer); - return this; - } - - public NameOrDefinition tokenizer() { - return this.tokenizer; - } - - public AnalyzeRequest addTokenFilter(String tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(tokenFilter)); - return this; - } - - public AnalyzeRequest addTokenFilter(Map tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(tokenFilter)); - return this; - } - - public List tokenFilters() { - return this.tokenFilters; - } - - public AnalyzeRequest addCharFilter(Map charFilter) { - this.charFilters.add(new NameOrDefinition(charFilter)); - return this; - } - - public AnalyzeRequest addCharFilter(String charFilter) { - this.charFilters.add(new NameOrDefinition(charFilter)); - return this; - } - - public List charFilters() { - return this.charFilters; - } - - public AnalyzeRequest field(String field) { - this.field = field; - return this; - } - - public String field() { - return this.field; - } - - public AnalyzeRequest explain(boolean explain) { - this.explain = explain; - return this; - } - - public boolean explain() { - return this.explain; - } - - public AnalyzeRequest attributes(String... attributes) { - if (attributes == null) { - throw new IllegalArgumentException("attributes must not be null"); - } - this.attributes = attributes; - return this; - } - - public String[] attributes() { - return this.attributes; - } - - public String normalizer() { - return this.normalizer; - } - - public AnalyzeRequest normalizer(String normalizer) { - this.normalizer = normalizer; - return this; - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if (text == null || text.length == 0) { - validationException = addValidationError("text is missing", validationException); - } - if ((index == null || index.length() == 0) && normalizer != null) { - validationException = addValidationError("index is required if normalizer is specified", validationException); - } - if (normalizer != null && (tokenizer != null || analyzer != null)) { - validationException = addValidationError("tokenizer/analyze should be null if normalizer is specified", validationException); - } - return validationException; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - text = in.readStringArray(); - analyzer = in.readOptionalString(); - tokenizer = in.readOptionalWriteable(NameOrDefinition::new); - tokenFilters.addAll(in.readList(NameOrDefinition::new)); - charFilters.addAll(in.readList(NameOrDefinition::new)); - field = in.readOptionalString(); - explain = in.readBoolean(); - attributes = in.readStringArray(); - if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) { - normalizer = in.readOptionalString(); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringArray(text); - out.writeOptionalString(analyzer); - out.writeOptionalWriteable(tokenizer); - out.writeList(tokenFilters); - out.writeList(charFilters); - out.writeOptionalString(field); - out.writeBoolean(explain); - 
out.writeStringArray(attributes); - if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) { - out.writeOptionalString(normalizer); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("text", text); - if (Strings.isNullOrEmpty(analyzer) == false) { - builder.field("analyzer", analyzer); - } - if (tokenizer != null) { - tokenizer.toXContent(builder, params); - } - if (tokenFilters.size() > 0) { - builder.field("filter", tokenFilters); - } - if (charFilters.size() > 0) { - builder.field("char_filter", charFilters); - } - if (Strings.isNullOrEmpty(field) == false) { - builder.field("field", field); - } - if (explain) { - builder.field("explain", true); - } - if (attributes.length > 0) { - builder.field("attributes", attributes); - } - if (Strings.isNullOrEmpty(normalizer) == false) { - builder.field("normalizer", normalizer); - } - return builder.endObject(); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java index 3893cb25d9d..2bd1724c5e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java @@ -23,14 +23,15 @@ import org.elasticsearch.client.ElasticsearchClient; import java.util.Map; -public class AnalyzeRequestBuilder extends SingleShardOperationRequestBuilder { +public class AnalyzeRequestBuilder + extends SingleShardOperationRequestBuilder { public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action) { - super(client, action, new AnalyzeRequest()); + super(client, action, new AnalyzeAction.Request()); } public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action, String index, String... text) { - super(client, action, new AnalyzeRequest(index).text(text)); + super(client, action, new AnalyzeAction.Request(index).text(text)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java deleted file mode 100644 index 7e6d525cefb..00000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.action.admin.indices.analyze; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.TreeMap; - -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - -public class AnalyzeResponse extends ActionResponse implements Iterable, ToXContentObject { - - public static class AnalyzeToken implements Writeable, ToXContentObject { - private final String term; - private final int startOffset; - private final int endOffset; - private final int position; - private final int positionLength; - private final Map attributes; - private final String type; - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AnalyzeToken that = (AnalyzeToken) o; - return startOffset == that.startOffset && - endOffset == that.endOffset && - position == that.position && - positionLength == that.positionLength && - Objects.equals(term, that.term) && - Objects.equals(attributes, that.attributes) && - Objects.equals(type, that.type); - } - - @Override - public int hashCode() { - return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type); - } - - AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength, - String type, Map attributes) { - this.term = term; - this.position = position; - this.startOffset = startOffset; - this.endOffset = endOffset; - this.positionLength = positionLength; - this.type = type; - this.attributes = attributes; - } - - AnalyzeToken(StreamInput in) throws IOException { - term = in.readString(); - startOffset = in.readInt(); - endOffset = in.readInt(); - position = in.readVInt(); - Integer len = in.readOptionalVInt(); - if (len != null) { - positionLength = len; - } else { - positionLength = 1; - } - type = in.readOptionalString(); - attributes = in.readMap(); - } - - public String getTerm() { - return this.term; - } - - public int getStartOffset() { - return this.startOffset; - } - - public int getEndOffset() { - return this.endOffset; - } - - public int getPosition() { - return this.position; - } - - public int getPositionLength() { - return this.positionLength; - } - - public String getType() { - return this.type; - } - - public Map getAttributes(){ - return this.attributes; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Fields.TOKEN, term); - builder.field(Fields.START_OFFSET, startOffset); - builder.field(Fields.END_OFFSET, endOffset); - builder.field(Fields.TYPE, type); - builder.field(Fields.POSITION, position); - if (positionLength > 1) { - builder.field(Fields.POSITION_LENGTH, 
positionLength); - } - if (attributes != null && !attributes.isEmpty()) { - Map sortedAttributes = new TreeMap<>(attributes); - for (Map.Entry entity : sortedAttributes.entrySet()) { - builder.field(entity.getKey(), entity.getValue()); - } - } - builder.endObject(); - return builder; - } - - public static AnalyzeToken fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); - String field = null; - String term = ""; - int position = -1; - int startOffset = -1; - int endOffset = -1; - int positionLength = 1; - String type = ""; - Map attributes = new HashMap<>(); - for (XContentParser.Token t = parser.nextToken(); t != XContentParser.Token.END_OBJECT; t = parser.nextToken()) { - if (t == XContentParser.Token.FIELD_NAME) { - field = parser.currentName(); - continue; - } - if (Fields.TOKEN.equals(field)) { - term = parser.text(); - } else if (Fields.POSITION.equals(field)) { - position = parser.intValue(); - } else if (Fields.START_OFFSET.equals(field)) { - startOffset = parser.intValue(); - } else if (Fields.END_OFFSET.equals(field)) { - endOffset = parser.intValue(); - } else if (Fields.POSITION_LENGTH.equals(field)) { - positionLength = parser.intValue(); - } else if (Fields.TYPE.equals(field)) { - type = parser.text(); - } else { - if (t == XContentParser.Token.VALUE_STRING) { - attributes.put(field, parser.text()); - } else if (t == XContentParser.Token.VALUE_NUMBER) { - attributes.put(field, parser.numberValue()); - } else if (t == XContentParser.Token.VALUE_BOOLEAN) { - attributes.put(field, parser.booleanValue()); - } else if (t == XContentParser.Token.START_OBJECT) { - attributes.put(field, parser.map()); - } else if (t == XContentParser.Token.START_ARRAY) { - attributes.put(field, parser.list()); - } - } - } - return new AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(term); - out.writeInt(startOffset); - out.writeInt(endOffset); - out.writeVInt(position); - out.writeOptionalVInt(positionLength > 1 ? 
positionLength : null); - out.writeOptionalString(type); - out.writeMapWithConsistentOrder(attributes); - } - } - - private final DetailAnalyzeResponse detail; - private final List tokens; - - public AnalyzeResponse(List tokens, DetailAnalyzeResponse detail) { - this.tokens = tokens; - this.detail = detail; - } - - public AnalyzeResponse(StreamInput in) throws IOException { - super.readFrom(in); - int size = in.readVInt(); - if (size > 0) { - tokens = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - tokens.add(new AnalyzeToken(in)); - } - } - else { - tokens = null; - } - detail = in.readOptionalWriteable(DetailAnalyzeResponse::new); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - - public List getTokens() { - return this.tokens; - } - - public DetailAnalyzeResponse detail() { - return this.detail; - } - - @Override - public Iterator iterator() { - return tokens.iterator(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (tokens != null) { - builder.startArray(Fields.TOKENS); - for (AnalyzeToken token : tokens) { - token.toXContent(builder, params); - } - builder.endArray(); - } - - if (detail != null) { - builder.startObject(Fields.DETAIL); - detail.toXContent(builder, params); - builder.endObject(); - } - builder.endObject(); - return builder; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("analyze_response", - true, args -> new AnalyzeResponse((List) args[0], (DetailAnalyzeResponse) args[1])); - static { - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AnalyzeToken.fromXContent(p), new ParseField(Fields.TOKENS)); - PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(Fields.DETAIL)); - } - - public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - if (tokens != null) { - out.writeVInt(tokens.size()); - for (AnalyzeToken token : tokens) { - token.writeTo(out); - } - } else { - out.writeVInt(0); - } - out.writeOptionalWriteable(detail); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AnalyzeResponse that = (AnalyzeResponse) o; - return Objects.equals(detail, that.detail) && - Objects.equals(tokens, that.tokens); - } - - @Override - public int hashCode() { - return Objects.hash(detail, tokens); - } - - @Override - public String toString() { - return Strings.toString(this, true, true); - } - - static final class Fields { - static final String TOKENS = "tokens"; - static final String TOKEN = "token"; - static final String START_OFFSET = "start_offset"; - static final String END_OFFSET = "end_offset"; - static final String TYPE = "type"; - static final String POSITION = "position"; - static final String POSITION_LENGTH = "positionLength"; - static final String DETAIL = "detail"; - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java deleted file mode 100644 index 1e84d9e0a2e..00000000000 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java +++ /dev/null @@ -1,400 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.analyze; - - -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentFragment; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; - -import java.io.IOException; -import java.lang.reflect.Array; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class DetailAnalyzeResponse implements Writeable, ToXContentFragment { - - private final boolean customAnalyzer; - private final AnalyzeTokenList analyzer; - private final CharFilteredText[] charfilters; - private final AnalyzeTokenList tokenizer; - private final AnalyzeTokenList[] tokenfilters; - - public DetailAnalyzeResponse(AnalyzeTokenList analyzer) { - this(false, analyzer, null, null, null); - } - - public DetailAnalyzeResponse(CharFilteredText[] charfilters, AnalyzeTokenList tokenizer, AnalyzeTokenList[] tokenfilters) { - this(true, null, charfilters, tokenizer, tokenfilters); - } - - public DetailAnalyzeResponse(boolean customAnalyzer, - AnalyzeTokenList analyzer, - CharFilteredText[] charfilters, - AnalyzeTokenList tokenizer, - AnalyzeTokenList[] tokenfilters) { - this.customAnalyzer = customAnalyzer; - this.analyzer = analyzer; - this.charfilters = charfilters; - this.tokenizer = tokenizer; - this.tokenfilters = tokenfilters; - } - - public DetailAnalyzeResponse(StreamInput in) throws IOException { - this.customAnalyzer = in.readBoolean(); - if (customAnalyzer) { - tokenizer = new AnalyzeTokenList(in); - int size = in.readVInt(); - if (size > 0) { - charfilters = new CharFilteredText[size]; - for (int i = 0; i < size; i++) { - charfilters[i] = new CharFilteredText(in); - } - } - else { - charfilters = null; - } - size = in.readVInt(); - if (size > 0) { - tokenfilters = new AnalyzeTokenList[size]; - for (int i = 0; i < size; i++) { - tokenfilters[i] = new AnalyzeTokenList(in); - } - } - else { - tokenfilters = null; - } - analyzer = null; - } else { - analyzer = new AnalyzeTokenList(in); - tokenfilters = null; - 
tokenizer = null; - charfilters = null; - } - } - - public AnalyzeTokenList analyzer() { - return this.analyzer; - } - - public CharFilteredText[] charfilters() { - return this.charfilters; - } - - public AnalyzeTokenList tokenizer() { - return tokenizer; - } - - public AnalyzeTokenList[] tokenfilters() { - return tokenfilters; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DetailAnalyzeResponse that = (DetailAnalyzeResponse) o; - return customAnalyzer == that.customAnalyzer && - Objects.equals(analyzer, that.analyzer) && - Arrays.equals(charfilters, that.charfilters) && - Objects.equals(tokenizer, that.tokenizer) && - Arrays.equals(tokenfilters, that.tokenfilters); - } - - @Override - public int hashCode() { - int result = Objects.hash(customAnalyzer, analyzer, tokenizer); - result = 31 * result + Arrays.hashCode(charfilters); - result = 31 * result + Arrays.hashCode(tokenfilters); - return result; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.CUSTOM_ANALYZER, customAnalyzer); - - if (analyzer != null) { - builder.startObject(Fields.ANALYZER); - analyzer.toXContentWithoutObject(builder, params); - builder.endObject(); - } - - if (charfilters != null) { - builder.startArray(Fields.CHARFILTERS); - for (CharFilteredText charfilter : charfilters) { - charfilter.toXContent(builder, params); - } - builder.endArray(); - } - - if (tokenizer != null) { - builder.startObject(Fields.TOKENIZER); - tokenizer.toXContentWithoutObject(builder, params); - builder.endObject(); - } - - if (tokenfilters != null) { - builder.startArray(Fields.TOKENFILTERS); - for (AnalyzeTokenList tokenfilter : tokenfilters) { - tokenfilter.toXContent(builder, params); - } - builder.endArray(); - } - return builder; - } - - @SuppressWarnings("unchecked") - private static T[] fromList(Class clazz, List list) { - if (list == null) { - return null; - } - return list.toArray((T[])Array.newInstance(clazz, 0)); - } - - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("detail", - true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1], - fromList(CharFilteredText.class, (List)args[2]), - (AnalyzeTokenList) args[3], - fromList(AnalyzeTokenList.class, (List)args[4]))); - - static { - PARSER.declareBoolean(constructorArg(), new ParseField(Fields.CUSTOM_ANALYZER)); - PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.ANALYZER)); - PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField(Fields.CHARFILTERS)); - PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENIZER)); - PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENFILTERS)); - } - - public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - static final class Fields { - static final String NAME = "name"; - static final String FILTERED_TEXT = "filtered_text"; - static final String CUSTOM_ANALYZER = "custom_analyzer"; - static final String ANALYZER = "analyzer"; - static final String CHARFILTERS = "charfilters"; - static final String TOKENIZER = "tokenizer"; - static final String TOKENFILTERS = "tokenfilters"; - } - - @Override - public void writeTo(StreamOutput out) throws 
IOException { - out.writeBoolean(customAnalyzer); - if (customAnalyzer) { - tokenizer.writeTo(out); - if (charfilters != null) { - out.writeVInt(charfilters.length); - for (CharFilteredText charfilter : charfilters) { - charfilter.writeTo(out); - } - } else { - out.writeVInt(0); - } - if (tokenfilters != null) { - out.writeVInt(tokenfilters.length); - for (AnalyzeTokenList tokenfilter : tokenfilters) { - tokenfilter.writeTo(out); - } - } else { - out.writeVInt(0); - } - } else { - analyzer.writeTo(out); - } - } - - public static class AnalyzeTokenList implements Writeable, ToXContentObject { - private final String name; - private final AnalyzeResponse.AnalyzeToken[] tokens; - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AnalyzeTokenList that = (AnalyzeTokenList) o; - return Objects.equals(name, that.name) && - Arrays.equals(tokens, that.tokens); - } - - @Override - public int hashCode() { - int result = Objects.hash(name); - result = 31 * result + Arrays.hashCode(tokens); - return result; - } - - public AnalyzeTokenList(String name, AnalyzeResponse.AnalyzeToken[] tokens) { - this.name = name; - this.tokens = tokens; - } - - public AnalyzeTokenList(StreamInput in) throws IOException { - name = in.readString(); - int size = in.readVInt(); - if (size > 0) { - tokens = new AnalyzeResponse.AnalyzeToken[size]; - for (int i = 0; i < size; i++) { - tokens[i] = new AnalyzeResponse.AnalyzeToken(in); - } - } - else { - tokens = null; - } - } - - public String getName() { - return name; - } - - public AnalyzeResponse.AnalyzeToken[] getTokens() { - return tokens; - } - - XContentBuilder toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.NAME, this.name); - builder.startArray(AnalyzeResponse.Fields.TOKENS); - if (tokens != null) { - for (AnalyzeResponse.AnalyzeToken token : tokens) { - token.toXContent(builder, params); - } - } - builder.endArray(); - return builder; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - toXContentWithoutObject(builder, params); - builder.endObject(); - return builder; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("token_list", - true, args -> new AnalyzeTokenList((String) args[0], - fromList(AnalyzeResponse.AnalyzeToken.class, (List)args[1]))); - - static { - PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); - PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), - new ParseField(AnalyzeResponse.Fields.TOKENS)); - } - - public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(name); - if (tokens != null) { - out.writeVInt(tokens.length); - for (AnalyzeResponse.AnalyzeToken token : tokens) { - token.writeTo(out); - } - } else { - out.writeVInt(0); - } - } - } - - public static class CharFilteredText implements Writeable, ToXContentObject { - private final String name; - private final String[] texts; - - public CharFilteredText(String name, String[] texts) { - this.name = name; - if (texts != null) { - this.texts = texts; - } else { - this.texts = Strings.EMPTY_ARRAY; - } - } - - public CharFilteredText(StreamInput in) throws IOException { - name = 
in.readString(); - texts = in.readStringArray(); - } - - public String getName() { - return name; - } - - public String[] getTexts() { - return texts; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Fields.NAME, name); - builder.array(Fields.FILTERED_TEXT, texts); - builder.endObject(); - return builder; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("char_filtered_text", - true, args -> new CharFilteredText((String) args[0], ((List) args[1]).toArray(new String[0]))); - - static { - PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); - PARSER.declareStringArray(constructorArg(), new ParseField(Fields.FILTERED_TEXT)); - } - - public static CharFilteredText fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(name); - out.writeStringArray(texts); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CharFilteredText that = (CharFilteredText) o; - return Objects.equals(name, that.name) && - Arrays.equals(texts, that.texts); - } - - @Override - public int hashCode() { - int result = Objects.hash(name); - result = 31 * result + Arrays.hashCode(texts); - return result; - } - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 55bd5937426..abee1b07505 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -79,7 +79,7 @@ import java.util.function.Function; /** * Transport action used to execute analyze requests */ -public class TransportAnalyzeAction extends TransportSingleShardAction { +public class TransportAnalyzeAction extends TransportSingleShardAction { private final Settings settings; private final IndicesService indicesService; @@ -90,19 +90,19 @@ public class TransportAnalyzeAction extends TransportSingleShardAction getResponseReader() { - return AnalyzeResponse::new; + protected Writeable.Reader getResponseReader() { + return AnalyzeAction.Response::new; } @Override - protected boolean resolveIndex(AnalyzeRequest request) { + protected boolean resolveIndex(AnalyzeAction.Request request) { return request.index() != null; } @@ -124,7 +124,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokens = null; - DetailAnalyzeResponse detail = null; + List tokens = null; + AnalyzeAction.DetailAnalyzeResponse detail = null; if (request.explain()) { detail = detailAnalyze(request, analyzer, field, maxTokenCount); @@ -250,13 +251,13 @@ public class TransportAnalyzeAction extends TransportSingleShardAction simpleAnalyze(AnalyzeRequest request, - Analyzer analyzer, String field, int maxTokenCount) { + private static List simpleAnalyze(AnalyzeAction.Request request, + Analyzer analyzer, String field, int maxTokenCount) { TokenCounter tc = new TokenCounter(maxTokenCount); - List tokens = new ArrayList<>(); + List tokens = new ArrayList<>(); int lastPosition = -1; int lastOffset = 0; for (String text : request.text()) { @@ -273,7 +274,7 @@ public class 
TransportAnalyzeAction extends TransportSingleShardAction 0) { lastPosition = lastPosition + increment; } - tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), + tokens.add(new AnalyzeAction.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), lastOffset + offset.endOffset(), posLen.getPositionLength(), type.type(), null)); tc.increment(); } @@ -290,8 +291,9 @@ public class TransportAnalyzeAction extends TransportSingleShardAction includeAttributes = new HashSet<>(); if (request.attributes() != null) { for (String attribute : request.attributes()) { @@ -351,25 +353,25 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokens; + List tokens; private TokenCounter tc; TokenListCreator(int maxTokenCount) { @@ -465,7 +468,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction 0) { lastPosition = lastPosition + increment; } - tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), + tokens.add(new AnalyzeAction.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), lastOffset + offset.endOffset(), posLen.getPositionLength(), type.type(), extractExtendedAttributes(stream, includeAttributes))); tc.increment(); @@ -484,8 +487,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction parseCharFilterFactories(AnalyzeRequest request, IndexSettings indexSettings, + private static List parseCharFilterFactories(AnalyzeAction.Request request, IndexSettings indexSettings, AnalysisRegistry analysisRegistry, Environment environment, boolean normalizer) throws IOException { List charFilterFactoryList = new ArrayList<>(); if (request.charFilters() != null && request.charFilters().size() > 0) { - List charFilters = request.charFilters(); - for (AnalyzeRequest.NameOrDefinition charFilter : charFilters) { + List charFilters = request.charFilters(); + for (AnalyzeAction.Request.NameOrDefinition charFilter : charFilters) { CharFilterFactory charFilterFactory; // parse anonymous settings if (charFilter.definition != null) { @@ -619,7 +622,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction parseTokenFilterFactories(AnalyzeRequest request, IndexSettings indexSettings, + private static List parseTokenFilterFactories(AnalyzeAction.Request request, IndexSettings indexSettings, AnalysisRegistry analysisRegistry, Environment environment, Tuple tokenizerFactory, List charFilterFactoryList, @@ -627,8 +630,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokenFilterFactoryList = new ArrayList<>(); DeferredTokenFilterRegistry deferredRegistry = new DeferredTokenFilterRegistry(analysisRegistry, indexSettings); if (request.tokenFilters() != null && request.tokenFilters().size() > 0) { - List tokenFilters = request.tokenFilters(); - for (AnalyzeRequest.NameOrDefinition tokenFilter : tokenFilters) { + List tokenFilters = request.tokenFilters(); + for (AnalyzeAction.Request.NameOrDefinition tokenFilter : tokenFilters) { TokenFilterFactory tokenFilterFactory; // parse anonymous settings if (tokenFilter.definition != null) { @@ -683,11 +686,12 @@ public class TransportAnalyzeAction extends TransportSingleShardAction parseTokenizerFactory(AnalyzeRequest request, IndexAnalyzers indexAnalzyers, - AnalysisRegistry analysisRegistry, Environment environment) throws IOException { + private static Tuple parseTokenizerFactory(AnalyzeAction.Request request, IndexAnalyzers 
indexAnalzyers, + AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { String name; TokenizerFactory tokenizerFactory; - final AnalyzeRequest.NameOrDefinition tokenizer = request.tokenizer(); + final AnalyzeAction.Request.NameOrDefinition tokenizer = request.tokenizer(); // parse anonymous settings if (tokenizer.definition != null) { Settings settings = getAnonymousSettings(tokenizer.definition); diff --git a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index d5a73981f29..3eb863bff7b 100644 --- a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -28,9 +28,8 @@ import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; @@ -672,12 +671,12 @@ public interface IndicesAdminClient extends ElasticsearchClient { /** * Analyze text under the provided index. */ - ActionFuture analyze(AnalyzeRequest request); + ActionFuture analyze(AnalyzeAction.Request request); /** * Analyze text under the provided index. */ - void analyze(AnalyzeRequest request, ActionListener listener); + void analyze(AnalyzeAction.Request request, ActionListener listener); /** * Analyze text under the provided index. 
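
The client-facing hunks above replace the standalone `AnalyzeRequest` and `AnalyzeResponse` classes with the nested `AnalyzeAction.Request` and `AnalyzeAction.Response` types. A minimal caller-side sketch of the refactored API follows; the `Client` instance, the index name `test`, and the sample text are illustrative assumptions, not part of this patch.

[source,java]
--------------------------------------------------
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.client.Client;

public class AnalyzeApiSketch {

    // Hypothetical helper: issues an analyze call using the nested request/response
    // types introduced by this refactoring instead of the removed AnalyzeRequest/AnalyzeResponse.
    static void analyzeText(Client client) {
        AnalyzeAction.Request request = new AnalyzeAction.Request("test"); // "test" is an assumed index name
        request.text("the quick brown fox");
        request.analyzer("standard");

        client.admin().indices().analyze(request, new ActionListener<AnalyzeAction.Response>() {
            @Override
            public void onResponse(AnalyzeAction.Response response) {
                // Tokens still expose term, position and offsets, as exercised by the tests later in this patch.
                for (AnalyzeAction.AnalyzeToken token : response.getTokens()) {
                    System.out.println(token.getTerm() + " @ " + token.getPosition());
                }
            }

            @Override
            public void onFailure(Exception e) {
                e.printStackTrace();
            }
        });
    }
}
--------------------------------------------------
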
diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index e79f0567bab..5c4c7ad44c6 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -142,9 +142,7 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder; @@ -1596,12 +1594,12 @@ public abstract class AbstractClient implements Client { } @Override - public ActionFuture analyze(final AnalyzeRequest request) { + public ActionFuture analyze(final AnalyzeAction.Request request) { return execute(AnalyzeAction.INSTANCE, request); } @Override - public void analyze(final AnalyzeRequest request, final ActionListener listener) { + public void analyze(final AnalyzeAction.Request request, final ActionListener listener) { execute(AnalyzeAction.INSTANCE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java index d9d6bbcfee9..99c85981068 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.rest.action.admin.indices; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; @@ -29,8 +29,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -64,106 +62,10 @@ public class RestAnalyzeAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - - AnalyzeRequest analyzeRequest = new AnalyzeRequest(request.param("index")); - try (XContentParser parser = request.contentOrSourceParamParser()) { - buildFromContent(parser, analyzeRequest); - } catch (IOException e) { - throw new IllegalArgumentException("Failed to parse request body", e); - } - - return channel -> client.admin().indices().analyze(analyzeRequest, new RestToXContentListener<>(channel)); - } - - static void buildFromContent(XContentParser parser, AnalyzeRequest analyzeRequest) - throws IOException { - if (parser.nextToken() != 
XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Malformed content, must start with an object"); - } else { - XContentParser.Token token; - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (Fields.TEXT.match(currentFieldName, parser.getDeprecationHandler()) && - token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.text(parser.text()); - } else if (Fields.TEXT.match(currentFieldName, parser.getDeprecationHandler()) && - token == XContentParser.Token.START_ARRAY) { - List texts = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token.isValue() == false) { - throw new IllegalArgumentException(currentFieldName + " array element should only contain text"); - } - texts.add(parser.text()); - } - analyzeRequest.text(texts.toArray(new String[texts.size()])); - } else if (Fields.ANALYZER.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.analyzer(parser.text()); - } else if (Fields.FIELD.match(currentFieldName, parser.getDeprecationHandler()) && - token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.field(parser.text()); - } else if (Fields.TOKENIZER.match(currentFieldName, parser.getDeprecationHandler())) { - if (token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.tokenizer(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - analyzeRequest.tokenizer(parser.map()); - } else { - throw new IllegalArgumentException(currentFieldName + " should be tokenizer's name or setting"); - } - } else if (Fields.TOKEN_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.addTokenFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - analyzeRequest.addTokenFilter(parser.map()); - } else { - throw new IllegalArgumentException(currentFieldName - + " array element should contain filter's name or setting"); - } - } - } else if (Fields.CHAR_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.addCharFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - analyzeRequest.addCharFilter(parser.map()); - } else { - throw new IllegalArgumentException(currentFieldName - + " array element should contain char filter's name or setting"); - } - } - } else if (Fields.EXPLAIN.match(currentFieldName, parser.getDeprecationHandler())) { - if (parser.isBooleanValue()) { - analyzeRequest.explain(parser.booleanValue()); - } else { - throw new IllegalArgumentException(currentFieldName + " must be either 'true' or 'false'"); - } - } else if (Fields.ATTRIBUTES.match(currentFieldName, parser.getDeprecationHandler()) && - token == XContentParser.Token.START_ARRAY) { - List attributes = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token.isValue() == false) { - throw new IllegalArgumentException(currentFieldName + " array element should only contain attribute name"); - } - 
attributes.add(parser.text()); - } - analyzeRequest.attributes(attributes.toArray(new String[attributes.size()])); - } else if (Fields.NORMALIZER.match(currentFieldName, parser.getDeprecationHandler())) { - if (token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.normalizer(parser.text()); - } else { - throw new IllegalArgumentException(currentFieldName + " should be normalizer's name"); - } - } else { - throw new IllegalArgumentException("Unknown parameter [" - + currentFieldName + "] in request body or parameter is of the wrong type[" + token + "] "); - } - } + AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(parser, request.param("index")); + return channel -> client.admin().indices().analyze(analyzeRequest, new RestToXContentListener<>(channel)); } } + } diff --git a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index 2dbb52d547f..cb5bad021ea 100644 --- a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.action; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexAction; @@ -207,7 +206,7 @@ public class IndicesRequestIT extends ESIntegTestCase { String analyzeShardAction = AnalyzeAction.NAME + "[s]"; interceptTransportActions(analyzeShardAction); - AnalyzeRequest analyzeRequest = new AnalyzeRequest(randomIndexOrAlias()); + AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request(randomIndexOrAlias()); analyzeRequest.text("text"); internalCluster().coordOnlyNodeClient().admin().indices().analyze(analyzeRequest).actionGet(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index b0c2e34c306..c4d7834f5a8 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -22,8 +22,7 @@ import org.apache.lucene.analysis.MockTokenFilter; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.UUIDs; @@ -136,15 +135,15 @@ public class TransportAnalyzeActionTests extends ESTestCase { */ public void testNoIndexAnalyzers() throws IOException { // Refer to an analyzer by its type so we get its default configuration - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text("the quick brown fox"); request.analyzer("standard"); - AnalyzeResponse analyze = 
TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); + List tokens = analyze.getTokens(); assertEquals(4, tokens.size()); // Refer to a token filter by its type so we get its default configuration - request = new AnalyzeRequest(); + request = new AnalyzeAction.Request(); request.text("the qu1ck brown fox"); request.tokenizer("standard"); request.addTokenFilter("mock"); @@ -157,7 +156,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { assertEquals("fox", tokens.get(2).getTerm()); // We can refer to a pre-configured token filter by its name to get it - request = new AnalyzeRequest(); + request = new AnalyzeAction.Request(); request.text("the qu1ck brown fox"); request.tokenizer("standard"); request.addCharFilter("append_foo"); @@ -171,7 +170,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { assertEquals("foxfoo", tokens.get(3).getTerm()); // We can refer to a token filter by its type to get its default configuration - request = new AnalyzeRequest(); + request = new AnalyzeAction.Request(); request.text("the qu1ck brown fox"); request.tokenizer("standard"); request.addCharFilter("append"); @@ -187,11 +186,11 @@ public class TransportAnalyzeActionTests extends ESTestCase { } public void testFillsAttributes() throws IOException { - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.analyzer("standard"); request.text("the 1 brown fox"); - AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); + List tokens = analyze.getTokens(); assertEquals(4, tokens.size()); assertEquals("the", tokens.get(0).getTerm()); assertEquals(0, tokens.get(0).getStartOffset()); @@ -219,12 +218,12 @@ public class TransportAnalyzeActionTests extends ESTestCase { } public void testWithIndexAnalyzers() throws IOException { - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text("the quick brown fox"); request.analyzer("custom_analyzer"); - AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + List tokens = analyze.getTokens(); assertEquals(3, tokens.size()); assertEquals("quick", tokens.get(0).getTerm()); assertEquals("brown", tokens.get(1).getTerm()); @@ -263,7 +262,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { public void testGetIndexAnalyserWithoutIndexAnalyzers() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .analyzer("custom_analyzer") .text("the qu1ck brown fox-dog"), "text", null, null, registry, environment, maxTokenCount)); @@ -274,7 +273,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { boolean notGlobal = randomBoolean(); IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .analyzer("foobar") .text("the qu1ck brown fox"), "text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount)); @@ -286,7 +285,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .tokenizer("foobar") .text("the qu1ck brown fox"), "text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount)); @@ -298,7 +297,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .tokenizer("standard") .addTokenFilter("foobar") .text("the qu1ck brown fox"), @@ -311,7 +310,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .tokenizer("standard") .addTokenFilter("lowercase") .addCharFilter("foobar") @@ -325,7 +324,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .normalizer("foobar") .text("the qu1ck brown fox"), "text", null, indexAnalyzers, registry, environment, maxTokenCount)); @@ -333,13 +332,13 @@ public class TransportAnalyzeActionTests extends ESTestCase { } public void testNonPreBuildTokenFilter() throws IOException { - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.tokenizer("standard"); request.addTokenFilter("stop"); // stop token filter is not prebuilt in AnalysisModule#setupPreConfiguredTokenFilters() request.text("the quick brown fox"); - AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + List tokens = analyze.getTokens(); assertEquals(3, tokens.size()); assertEquals("quick", tokens.get(0).getTerm()); assertEquals("brown", tokens.get(1).getTerm()); @@ -347,12 +346,12 @@ public class TransportAnalyzeActionTests extends ESTestCase { } public void testNormalizerWithIndex() throws IOException { - AnalyzeRequest request = new AnalyzeRequest("index"); + AnalyzeAction.Request request = new AnalyzeAction.Request("index"); request.normalizer("my_normalizer"); request.text("ABc"); - AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + List tokens = analyze.getTokens(); assertEquals(1, tokens.size()); assertEquals("abc", tokens.get(0).getTerm()); @@ -372,7 +371,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { String text = sbText.toString(); // request with explain=false to test simpleAnalyze path in TransportAnalyzeAction - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); 
request.text(text); request.analyzer("standard"); IllegalStateException e = expectThrows(IllegalStateException.class, @@ -382,7 +381,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { + maxTokenCount + "]." + " This limit can be set by changing the [index.analyze.max_token_count] index level setting."); // request with explain=true to test detailAnalyze path in TransportAnalyzeAction - AnalyzeRequest request2 = new AnalyzeRequest(); + AnalyzeAction.Request request2 = new AnalyzeAction.Request(); request2.text(text); request2.analyzer("standard"); request2.explain(true); @@ -406,7 +405,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { } String text = sbText.toString(); - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text(text); request.analyzer("standard"); IllegalStateException e = expectThrows(IllegalStateException.class, diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java index d83b2fae0f9..017cf3a8385 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java @@ -30,7 +30,7 @@ import java.io.IOException; public class AnalyzeRequestTests extends ESTestCase { public void testValidation() throws Exception { - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); ActionRequestValidationException e = request.validate(); assertNotNull("text validation should fail", e); @@ -60,7 +60,7 @@ public class AnalyzeRequestTests extends ESTestCase { e = request.validate(); assertTrue(e.getMessage().contains("tokenizer/analyze should be null if normalizer is specified")); - AnalyzeRequest requestAnalyzer = new AnalyzeRequest("index"); + AnalyzeAction.Request requestAnalyzer = new AnalyzeAction.Request("index"); requestAnalyzer.normalizer("some normalizer"); requestAnalyzer.text("something"); requestAnalyzer.analyzer("analyzer"); @@ -69,7 +69,7 @@ public class AnalyzeRequestTests extends ESTestCase { } public void testSerialization() throws IOException { - AnalyzeRequest request = new AnalyzeRequest("foo"); + AnalyzeAction.Request request = new AnalyzeAction.Request("foo"); request.text("a", "b"); request.tokenizer("tokenizer"); request.addTokenFilter("tokenfilter"); @@ -79,7 +79,7 @@ public class AnalyzeRequestTests extends ESTestCase { try (BytesStreamOutput output = new BytesStreamOutput()) { request.writeTo(output); try (StreamInput in = output.bytes().streamInput()) { - AnalyzeRequest serialized = new AnalyzeRequest(); + AnalyzeAction.Request serialized = new AnalyzeAction.Request(); serialized.readFrom(in); assertArrayEquals(request.text(), serialized.text()); assertEquals(request.tokenizer().name, serialized.tokenizer().name); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java index a4cee7a4cde..95fc010f37f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java @@ -20,124 +20,35 @@ package org.elasticsearch.action.admin.indices.analyze; import 
org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import static org.hamcrest.Matchers.equalTo; -public class AnalyzeResponseTests extends AbstractSerializingTestCase { - - @Override - protected Predicate getRandomFieldsExcludeFilter() { - return s -> s.contains("tokens."); - } - - @Override - protected AnalyzeResponse doParseInstance(XContentParser parser) throws IOException { - return AnalyzeResponse.fromXContent(parser); - } - - @Override - protected Writeable.Reader instanceReader() { - return AnalyzeResponse::new; - } - - @Override - protected AnalyzeResponse createTestInstance() { - int tokenCount = randomIntBetween(1, 30); - AnalyzeResponse.AnalyzeToken[] tokens = new AnalyzeResponse.AnalyzeToken[tokenCount]; - for (int i = 0; i < tokenCount; i++) { - tokens[i] = randomToken(); - } - if (randomBoolean()) { - DetailAnalyzeResponse.CharFilteredText[] charfilters = null; - DetailAnalyzeResponse.AnalyzeTokenList[] tokenfilters = null; - if (randomBoolean()) { - charfilters = new DetailAnalyzeResponse.CharFilteredText[]{ - new DetailAnalyzeResponse.CharFilteredText("my_charfilter", new String[]{"one two"}) - }; - } - if (randomBoolean()) { - tokenfilters = new DetailAnalyzeResponse.AnalyzeTokenList[]{ - new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_1", tokens), - new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_2", tokens) - }; - } - DetailAnalyzeResponse dar = new DetailAnalyzeResponse( - charfilters, - new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens), - tokenfilters); - return new AnalyzeResponse(null, dar); - } - return new AnalyzeResponse(Arrays.asList(tokens), null); - } - - private AnalyzeResponse.AnalyzeToken randomToken() { - String token = randomAlphaOfLengthBetween(1, 20); - int position = randomIntBetween(0, 1000); - int startOffset = randomIntBetween(0, 1000); - int endOffset = randomIntBetween(0, 1000); - int posLength = randomIntBetween(1, 5); - String type = randomAlphaOfLengthBetween(1, 20); - Map extras = new HashMap<>(); - if (randomBoolean()) { - int entryCount = randomInt(6); - for (int i = 0; i < entryCount; i++) { - switch (randomInt(6)) { - case 0: - case 1: - case 2: - case 3: - String key = randomAlphaOfLength(5); - String value = randomAlphaOfLength(10); - extras.put(key, value); - break; - case 4: - String objkey = randomAlphaOfLength(5); - Map obj = new HashMap<>(); - obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10)); - extras.put(objkey, obj); - break; - case 5: - String listkey = randomAlphaOfLength(5); - List list = new ArrayList<>(); - list.add(randomAlphaOfLength(4)); - list.add(randomAlphaOfLength(6)); - extras.put(listkey, list); - break; - } - } - } - return new AnalyzeResponse.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras); - } +public class AnalyzeResponseTests extends ESTestCase { + @SuppressWarnings("unchecked") public void 
testNullResponseToXContent() throws IOException { - DetailAnalyzeResponse.CharFilteredText[] charfilters = null; + AnalyzeAction.CharFilteredText[] charfilters = null; String name = "test_tokens_null"; - AnalyzeResponse.AnalyzeToken[] tokens = null; - DetailAnalyzeResponse.AnalyzeTokenList tokenizer = null; + AnalyzeAction.AnalyzeToken[] tokens = null; + AnalyzeAction.AnalyzeTokenList tokenizer = null; - DetailAnalyzeResponse.AnalyzeTokenList tokenfiltersItem = new DetailAnalyzeResponse.AnalyzeTokenList(name, tokens); - DetailAnalyzeResponse.AnalyzeTokenList[] tokenfilters = {tokenfiltersItem}; + AnalyzeAction.AnalyzeTokenList tokenfiltersItem = new AnalyzeAction.AnalyzeTokenList(name, tokens); + AnalyzeAction.AnalyzeTokenList[] tokenfilters = {tokenfiltersItem}; - DetailAnalyzeResponse detail = new DetailAnalyzeResponse(charfilters, tokenizer, tokenfilters); + AnalyzeAction.DetailAnalyzeResponse detail = new AnalyzeAction.DetailAnalyzeResponse(charfilters, tokenizer, tokenfilters); - AnalyzeResponse response = new AnalyzeResponse(null, detail); + AnalyzeAction.Response response = new AnalyzeAction.Response(null, detail); try (XContentBuilder builder = JsonXContent.contentBuilder()) { response.toXContent(builder, ToXContent.EMPTY_PARAMS); Map converted = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); diff --git a/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 4511c59c6b3..10a1ffe5c7b 100644 --- a/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -19,8 +19,8 @@ package org.elasticsearch.indices.analyze; import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -53,9 +53,9 @@ public class AnalyzeActionIT extends ESIntegTestCase { ensureGreen(); for (int i = 0; i < 10; i++) { - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "this is a test").get(); + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "this is a test").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("this")); assertThat(token.getStartOffset(), equalTo(0)); assertThat(token.getEndOffset(), equalTo(4)); @@ -94,7 +94,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { } public void testAnalyzeWithNoIndex() throws Exception { - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setAnalyzer("simple").get(); + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setAnalyzer("simple").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("keyword").addTokenFilter("lowercase") @@ -105,7 +105,7 @@ public class 
AnalyzeActionIT extends ESIntegTestCase { analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("standard").addTokenFilter("lowercase") .get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("this")); token = analyzeResponse.getTokens().get(1); assertThat(token.getTerm(), equalTo("is")); @@ -134,9 +134,9 @@ public class AnalyzeActionIT extends ESIntegTestCase { final AnalyzeRequestBuilder requestBuilder = client().admin().indices().prepareAnalyze("THIS IS A TEST"); requestBuilder.setIndex(indexOrAlias()); requestBuilder.setField("document.simple"); - AnalyzeResponse analyzeResponse = requestBuilder.get(); + AnalyzeAction.Response analyzeResponse = requestBuilder.get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(3); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(3); assertThat(token.getTerm(), equalTo("test")); assertThat(token.getStartOffset(), equalTo(10)); assertThat(token.getEndOffset(), equalTo(14)); @@ -146,7 +146,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { // issue #5974 public void testThatStandardAndDefaultAnalyzersAreSame() throws Exception { - AnalyzeResponse response = client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("standard").get(); + AnalyzeAction.Response response = client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("standard").get(); assertTokens(response, "this", "is", "a", "test"); response = client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("default").get(); @@ -156,7 +156,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { assertTokens(response, "this", "is", "a", "test"); } - private void assertTokens(AnalyzeResponse response, String ... tokens) { + private void assertTokens(AnalyzeAction.Response response, String ... 
tokens) { assertThat(response.getTokens(), hasSize(tokens.length)); for (int i = 0; i < tokens.length; i++) { assertThat(response.getTokens().get(i).getTerm(), is(tokens[i])); @@ -180,9 +180,9 @@ public class AnalyzeActionIT extends ESIntegTestCase { requestBuilder.setText(texts); requestBuilder.setIndex(indexOrAlias()); requestBuilder.setField("simple"); - AnalyzeResponse analyzeResponse = requestBuilder.get(); + AnalyzeAction.Response analyzeResponse = requestBuilder.get(); assertThat(analyzeResponse.getTokens().size(), equalTo(7)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(3); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(3); assertThat(token.getTerm(), equalTo("test")); assertThat(token.getPosition(), equalTo(3)); assertThat(token.getStartOffset(), equalTo(10)); @@ -199,7 +199,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { public void testDetailAnalyzeWithNoIndex() throws Exception { //analyzer only - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") .setExplain(true).setAnalyzer("simple").get(); assertThat(analyzeResponse.detail().tokenizer(), IsNull.nullValue()); @@ -211,7 +211,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { public void testDetailAnalyzeCustomAnalyzerWithNoIndex() throws Exception { //analyzer only - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") .setExplain(true).setAnalyzer("simple").get(); assertThat(analyzeResponse.detail().tokenizer(), IsNull.nullValue()); @@ -257,12 +257,12 @@ public class AnalyzeActionIT extends ESIntegTestCase { .setType("document").setSource("simple", "type=text,analyzer=simple,position_increment_gap=100").get(); String[] texts = new String[]{"THIS IS A TEST", "THE SECOND TEXT"}; - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts) + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts) .setExplain(true).setField("simple").setText(texts).execute().get(); assertThat(analyzeResponse.detail().analyzer().getName(), equalTo("simple")); assertThat(analyzeResponse.detail().analyzer().getTokens().length, equalTo(7)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.detail().analyzer().getTokens()[3]; + AnalyzeAction.AnalyzeToken token = analyzeResponse.detail().analyzer().getTokens()[3]; assertThat(token.getTerm(), equalTo("test")); assertThat(token.getPosition(), equalTo(3)); @@ -292,7 +292,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { Map stopFilterSettings = new HashMap<>(); stopFilterSettings.put("type", "stop"); stopFilterSettings.put("stopwords", new String[]{"foo", "buzz"}); - AnalyzeResponse analyzeResponse = client().admin().indices() + AnalyzeAction.Response analyzeResponse = client().admin().indices() .prepareAnalyze() .setText("Foo buzz test") .setTokenizer("standard") @@ -359,9 +359,9 @@ public class AnalyzeActionIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("test", "keyword", "type=keyword")); ensureGreen("test"); - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get(); + 
AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("ABC")); assertThat(token.getStartOffset(), equalTo(0)); assertThat(token.getEndOffset(), equalTo(3)); @@ -377,9 +377,9 @@ public class AnalyzeActionIT extends ESIntegTestCase { .addMapping("test", "keyword", "type=keyword,normalizer=my_normalizer")); ensureGreen("test"); - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get(); + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("abc")); assertThat(token.getStartOffset(), equalTo(0)); assertThat(token.getEndOffset(), equalTo(3)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index 406e9b1d36c..1cd79b3ae0c 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.rest.action.admin.indices; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -29,9 +29,11 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.startsWith; import static org.mockito.Mockito.mock; public class RestAnalyzeActionTests extends ESTestCase { @@ -44,15 +46,13 @@ public class RestAnalyzeActionTests extends ESTestCase { .array("filter", "lowercase") .endObject())) { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - - RestAnalyzeAction.buildFromContent(content, analyzeRequest); + AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(content, "for test"); assertThat(analyzeRequest.text().length, equalTo(1)); assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); assertThat(analyzeRequest.tokenFilters().size(), equalTo(1)); - for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) { + for (AnalyzeAction.Request.NameOrDefinition filter : analyzeRequest.tokenFilters()) { assertThat(filter.name, equalTo("lowercase")); } } @@ -79,9 +79,7 @@ public class RestAnalyzeActionTests extends ESTestCase { .field("normalizer", "normalizer") .endObject())) { - AnalyzeRequest analyzeRequest = new 
AnalyzeRequest("for test"); - - RestAnalyzeAction.buildFromContent(content, analyzeRequest); + AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(content, "for test"); assertThat(analyzeRequest.text().length, equalTo(1)); assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); @@ -95,48 +93,45 @@ public class RestAnalyzeActionTests extends ESTestCase { } } - public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { + public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() { RestAnalyzeAction action = new RestAnalyzeAction(Settings.EMPTY, mock(RestController.class)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) .withContent(new BytesArray("{invalid_json}"), XContentType.JSON).build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, null, null)); - assertThat(e.getMessage(), equalTo("Failed to parse request body")); + IOException e = expectThrows(IOException.class, () -> action.handleRequest(request, null, null)); + assertThat(e.getMessage(), containsString("expecting double-quote")); } public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); try (XContentParser invalidContent = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("unknown", "keyword") .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest)); - assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + () -> AnalyzeAction.Request.fromXContent(invalidContent, "for test")); + assertThat(e.getMessage(), containsString("unknown field [unknown]")); } } public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("explain", "fals") .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + () -> AnalyzeAction.Request.fromXContent(invalidExplain, "for test")); + assertThat(e.getMessage(), containsString("failed to parse field [explain]")); } } public void testParseXContentForAnalyzeRequestWithInvalidNormalizerThrowsException() throws Exception { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("normalizer", true) .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("normalizer should be normalizer's name")); + () -> AnalyzeAction.Request.fromXContent(invalidExplain, "for test")); + assertThat(e.getMessage(), containsString("normalizer doesn't support values of type: VALUE_BOOLEAN")); } } @@ -147,9 +142,9 @@ public class RestAnalyzeActionTests extends ESTestCase { .field("tokenizer", "keyword") .array("filters", "lowercase") .endObject())) { - IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); - assertThat(e.getMessage(), startsWith("Unknown parameter [filters]")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalyzeAction.Request.fromXContent(parser,"for test")); + assertThat(e.getMessage(), containsString("unknown field [filters]")); } try (XContentParser parser = createParser(XContentFactory.jsonBuilder() @@ -158,9 +153,9 @@ public class RestAnalyzeActionTests extends ESTestCase { .field("tokenizer", "keyword") .array("token_filters", "lowercase") .endObject())) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); - assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalyzeAction.Request.fromXContent(parser, "for test")); + assertThat(e.getMessage(), containsString("unknown field [token_filters]")); } try (XContentParser parser = createParser(XContentFactory.jsonBuilder() @@ -169,9 +164,9 @@ public class RestAnalyzeActionTests extends ESTestCase { .field("tokenizer", "keyword") .array("char_filters", "lowercase") .endObject())) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); - assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalyzeAction.Request.fromXContent(parser, "for test")); + assertThat(e.getMessage(), containsString("unknown field [char_filters]")); } try (XContentParser parser = createParser(XContentFactory.jsonBuilder() @@ -180,9 +175,9 @@ public class RestAnalyzeActionTests extends ESTestCase { .field("tokenizer", "keyword") .array("token_filter", "lowercase") .endObject())) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); - assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalyzeAction.Request.fromXContent(parser, "for test")); + assertThat(e.getMessage(), containsString("unknown field [token_filter]")); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionMapper.java index 409317bbf89..f4eb97fcb34 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionMapper.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.security.action; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.transport.TransportRequest; @@ -36,8 +35,8 @@ public class SecurityActionMapper { break; case AnalyzeAction.NAME: case AnalyzeAction.NAME + "[s]": - assert request instanceof AnalyzeRequest; - String[] indices = 
((AnalyzeRequest) request).indices(); + assert request instanceof AnalyzeAction.Request; + String[] indices = ((AnalyzeAction.Request) request).indices(); if (indices == null || (indices.length == 1 && indices[0] == null)) { return CLUSTER_PERMISSION_ANALYZE; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/SecurityActionMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/SecurityActionMapperTests.java index 6efb293f7b2..ef063c93961 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/SecurityActionMapperTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/SecurityActionMapperTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.security.action; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.test.ESTestCase; @@ -73,11 +72,11 @@ public class SecurityActionMapperTests extends ESTestCase { public void testIndicesAnalyze() { SecurityActionMapper securityActionMapper = new SecurityActionMapper(); - AnalyzeRequest analyzeRequest; + AnalyzeAction.Request analyzeRequest; if (randomBoolean()) { - analyzeRequest = new AnalyzeRequest(randomAlphaOfLength(randomIntBetween(1, 30))).text("text"); + analyzeRequest = new AnalyzeAction.Request(randomAlphaOfLength(randomIntBetween(1, 30))).text("text"); } else { - analyzeRequest = new AnalyzeRequest(null).text("text"); + analyzeRequest = new AnalyzeAction.Request(null).text("text"); analyzeRequest.index(randomAlphaOfLength(randomIntBetween(1, 30))); } assertThat(securityActionMapper.action(AnalyzeAction.NAME, analyzeRequest), equalTo(AnalyzeAction.NAME)); @@ -85,7 +84,7 @@ public class SecurityActionMapperTests extends ESTestCase { public void testClusterAnalyze() { SecurityActionMapper securityActionMapper = new SecurityActionMapper(); - AnalyzeRequest analyzeRequest = new AnalyzeRequest(null).text("text"); + AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request(null).text("text"); assertThat(securityActionMapper.action(AnalyzeAction.NAME, analyzeRequest), equalTo(SecurityActionMapper.CLUSTER_PERMISSION_ANALYZE)); } From df0f0b3d40589c3dca3d0f72a12c7e8458d84295 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 3 Jun 2019 12:12:07 +0100 Subject: [PATCH 033/210] Rename autoMinMasterNodes to autoManageMasterNodes (#42789) Renames the `ClusterScope` attribute `autoMinMasterNodes` to reflect its broader meaning since 7.0. Backport of the relevant part of #42700 to `7.x`. 
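A hypothetical test class using the renamed attribute might look like the sketch below; the class name, test body, and node counts are illustrative only, while the real call sites appear in the diff that follows.

[source,java]
--------------------------------------------------
// Hypothetical example: with autoManageMasterNodes = false the test, not the framework,
// is responsible for cluster bootstrapping before master-eligible nodes are started.
import org.elasticsearch.test.ESIntegTestCase;

@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false)
public class ManualBootstrapExampleIT extends ESIntegTestCase {

    public void testManualBootstrap() throws Exception {
        // Pick the node that will bootstrap the cluster, then start the nodes explicitly.
        internalCluster().setBootstrapMasterNodeIndex(0);
        internalCluster().startNodes(3);
        ensureStableCluster(3);
    }
}
--------------------------------------------------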
--- .../rest/discovery/Zen2RestApiIT.java | 2 +- .../admin/indices/exists/IndicesExistsIT.java | 2 +- .../master/IndexingMasterFailoverIT.java | 2 +- .../cluster/MinimumMasterNodesIT.java | 2 +- .../cluster/SpecificMasterNodesIT.java | 2 +- .../UnsafeBootstrapAndDetachCommandIT.java | 2 +- .../single/SingleNodeDiscoveryIT.java | 2 +- .../gateway/RecoverAfterNodesIT.java | 2 +- .../elasticsearch/test/ESIntegTestCase.java | 13 ++-- .../test/InternalTestCluster.java | 59 +++++++++---------- .../test/disruption/NetworkDisruptionIT.java | 2 +- 11 files changed, 44 insertions(+), 46 deletions(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/discovery/Zen2RestApiIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/discovery/Zen2RestApiIT.java index 83d4c3419ef..eb2eb14b66f 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/discovery/Zen2RestApiIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/discovery/Zen2RestApiIT.java @@ -47,7 +47,7 @@ import static org.hamcrest.core.Is.is; // These tests are here today so they have access to a proper REST client. They cannot be in :server:integTest since the REST client needs a // proper transport implementation, and they cannot be REST tests today since they need to restart nodes. When #35599 and friends land we // should be able to move these tests to run against a proper cluster instead. TODO do this. -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, transportClientRatio = 0, autoMinMasterNodes = false) +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, transportClientRatio = 0, autoManageMasterNodes = false) public class Zen2RestApiIT extends ESNetty4IntegTestCase { @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/exists/IndicesExistsIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/exists/IndicesExistsIT.java index 7cfc2ea1f28..0a0c1b66d8c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/exists/IndicesExistsIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/exists/IndicesExistsIT.java @@ -31,7 +31,7 @@ import java.io.IOException; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; @ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0.0, - autoMinMasterNodes = false) + autoManageMasterNodes = false) public class IndicesExistsIT extends ESIntegTestCase { public void testIndexExistsWithBlocksInPlace() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/server/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 1317183f286..45d8f4c8c0b 100644 --- a/server/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/server/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -39,7 +39,7 @@ import java.util.concurrent.CyclicBarrier; import static org.hamcrest.Matchers.equalTo; -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoMinMasterNodes = false) +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class IndexingMasterFailoverIT extends ESIntegTestCase { @Override diff --git 
a/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index cb1443bdf37..164c74423aa 100644 --- a/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -62,7 +62,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -@ClusterScope(scope = Scope.TEST, numDataNodes = 0, autoMinMasterNodes = false) +@ClusterScope(scope = Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) @TestLogging("_root:DEBUG,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.cluster.coordination:TRACE") public class MinimumMasterNodesIT extends ESIntegTestCase { diff --git a/server/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/server/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java index f80a5befa83..d8be488f5d4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java @@ -39,7 +39,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -@ClusterScope(scope = Scope.TEST, numDataNodes = 0, autoMinMasterNodes = false) +@ClusterScope(scope = Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) @TestLogging("_root:DEBUG,org.elasticsearch.action.admin.cluster.state:TRACE") public class SpecificMasterNodesIT extends ESIntegTestCase { diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java b/server/src/test/java/org/elasticsearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java index b2ed28dcdaa..c7373401913 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java @@ -48,7 +48,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitC import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoMinMasterNodes = false) +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) @TestLogging("_root:DEBUG,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.cluster.coordination:TRACE") public class UnsafeBootstrapAndDetachCommandIT extends ESIntegTestCase { diff --git a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java index 5d8e3407e18..ac6a2189092 100644 --- a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -50,7 +50,7 @@ import static org.hamcrest.Matchers.not; numDataNodes = 1, numClientNodes = 0, supportsDedicatedMasters = false, - autoMinMasterNodes = false) + autoManageMasterNodes = false) public class SingleNodeDiscoveryIT extends ESIntegTestCase { @Override diff --git a/server/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java 
b/server/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java index 86976d553fa..84188f80aae 100644 --- a/server/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java +++ b/server/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java @@ -34,7 +34,7 @@ import java.util.Set; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; -@ClusterScope(scope = Scope.TEST, numDataNodes = 0, autoMinMasterNodes = false) +@ClusterScope(scope = Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class RecoverAfterNodesIT extends ESIntegTestCase { private static final TimeValue BLOCK_WAIT_TIMEOUT = TimeValue.timeValueSeconds(10); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 365687db346..21e7dc3f683 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1671,10 +1671,11 @@ public abstract class ESIntegTestCase extends ESTestCase { boolean supportsDedicatedMasters() default true; /** - * The cluster automatically manages the {@link ElectMasterService#DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING} by default - * as nodes are started and stopped. Set this to false to manage the setting manually. + * Indicates whether the cluster automatically manages cluster bootstrapping and the removal of any master-eligible nodes as well + * as {@link ElectMasterService#DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING} if running the pre-7.0 cluster coordination + * implementation. If set to {@code false} then the tests must manage these things explicitly. */ - boolean autoMinMasterNodes() default true; + boolean autoManageMasterNodes() default true; /** * Returns the number of client nodes in the cluster. Default is {@link InternalTestCluster#DEFAULT_NUM_CLIENT_NODES}, a @@ -1768,9 +1769,9 @@ public abstract class ESIntegTestCase extends ESTestCase { return annotation == null ? true : annotation.supportsDedicatedMasters(); } - private boolean getAutoMinMasterNodes() { + private boolean getAutoManageMasterNodes() { ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); - return annotation == null ? true : annotation.autoMinMasterNodes(); + return annotation == null ? 
true : annotation.autoManageMasterNodes(); } private int getNumDataNodes() { @@ -1920,7 +1921,7 @@ public abstract class ESIntegTestCase extends ESTestCase { } mockPlugins = mocks; } - return new InternalTestCluster(seed, createTempDir(), supportsDedicatedMasters, getAutoMinMasterNodes(), + return new InternalTestCluster(seed, createTempDir(), supportsDedicatedMasters, getAutoManageMasterNodes(), minNumDataNodes, maxNumDataNodes, InternalTestCluster.clusterName(scope.name(), seed) + "-cluster", nodeConfigurationSource, getNumClientNodes(), nodePrefix, mockPlugins, getClientWrapper(), forbidPrivateIndexSettings()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 60061877754..66208479e06 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -238,7 +238,7 @@ public final class InternalTestCluster extends TestCluster { private final ExecutorService executor; - private final boolean autoManageMinMasterNodes; + private final boolean autoManageMasterNodes; private final Collection> mockPlugins; @@ -261,7 +261,7 @@ public final class InternalTestCluster extends TestCluster { final long clusterSeed, final Path baseDir, final boolean randomlyAddDedicatedMasters, - final boolean autoManageMinMasterNodes, + final boolean autoManageMasterNodes, final int minNumDataNodes, final int maxNumDataNodes, final String clusterName, @@ -274,7 +274,7 @@ public final class InternalTestCluster extends TestCluster { clusterSeed, baseDir, randomlyAddDedicatedMasters, - autoManageMinMasterNodes, + autoManageMasterNodes, minNumDataNodes, maxNumDataNodes, clusterName, @@ -290,7 +290,7 @@ public final class InternalTestCluster extends TestCluster { final long clusterSeed, final Path baseDir, final boolean randomlyAddDedicatedMasters, - final boolean autoManageMinMasterNodes, + final boolean autoManageMasterNodes, final int minNumDataNodes, final int maxNumDataNodes, final String clusterName, @@ -301,7 +301,7 @@ public final class InternalTestCluster extends TestCluster { final Function clientWrapper, final boolean forbidPrivateIndexSettings) { super(clusterSeed); - this.autoManageMinMasterNodes = autoManageMinMasterNodes; + this.autoManageMasterNodes = autoManageMasterNodes; this.clientWrapper = clientWrapper; this.forbidPrivateIndexSettings = forbidPrivateIndexSettings; this.baseDir = baseDir; @@ -359,7 +359,7 @@ public final class InternalTestCluster extends TestCluster { "[{}] (data) nodes and [{}] coord only nodes (min_master_nodes are [{}])", clusterName, SeedUtils.formatSeed(clusterSeed), numSharedDedicatedMasterNodes, numSharedDataNodes, numSharedCoordOnlyNodes, - autoManageMinMasterNodes ? "auto-managed" : "manual"); + autoManageMasterNodes ? "auto-managed" : "manual"); this.nodeConfigurationSource = nodeConfigurationSource; numDataPaths = random.nextInt(5) == 0 ? 2 + random.nextInt(3) : 1; Builder builder = Settings.builder(); @@ -409,12 +409,11 @@ public final class InternalTestCluster extends TestCluster { /** * Sets {@link #bootstrapMasterNodeIndex} to the given value, see {@link #bootstrapMasterNodeWithSpecifiedIndex(List)} * for the description of how this field is used. - * It's only possible to change {@link #bootstrapMasterNodeIndex} value if autoManageMinMasterNodes is false. 
+ * It's only possible to change {@link #bootstrapMasterNodeIndex} value if autoManageMasterNodes is false. */ public void setBootstrapMasterNodeIndex(int bootstrapMasterNodeIndex) { - if (autoManageMinMasterNodes && bootstrapMasterNodeIndex != -1) { - throw new AssertionError("bootstrapMasterNodeIndex should be -1 if autoManageMinMasterNodes is true"); - } + assert autoManageMasterNodes == false || bootstrapMasterNodeIndex == -1 + : "bootstrapMasterNodeIndex should be -1 if autoManageMasterNodes is true, but was " + bootstrapMasterNodeIndex; this.bootstrapMasterNodeIndex = bootstrapMasterNodeIndex; } @@ -425,7 +424,7 @@ public final class InternalTestCluster extends TestCluster { /** returns true if the {@link ElectMasterService#DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING} setting is auto managed by this cluster */ public boolean getAutoManageMinMasterNode() { - return autoManageMinMasterNodes; + return autoManageMasterNodes; } public String[] getNodeNames() { @@ -653,10 +652,10 @@ public final class InternalTestCluster extends TestCluster { final boolean usingSingleNodeDiscovery = discoveryType.equals("single-node"); final boolean usingZen1 = usingZen1(updatedSettings.build()); if (usingSingleNodeDiscovery == false) { - if (autoManageMinMasterNodes) { - assertThat("min master nodes may not be set when auto managed", + if (autoManageMasterNodes) { + assertThat("min master nodes may not be set when master nodes are auto managed", updatedSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()), nullValue()); - assertThat("automatically managing min master nodes require nodes to complete a join cycle when starting", + assertThat("if master nodes are automatically managed then nodes must complete a join cycle when starting", updatedSettings.get(INITIAL_STATE_TIMEOUT_SETTING.getKey()), nullValue()); if (usingZen1) { @@ -1135,7 +1134,7 @@ public final class InternalTestCluster extends TestCluster { if (wipeData) { wipePendingDataDirectories(); } - if (nodes.size() > 0 && autoManageMinMasterNodes) { + if (nodes.size() > 0 && autoManageMasterNodes) { updateMinMasterNodes(getMasterNodesCount()); } logger.debug("Cluster hasn't changed - moving out - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", @@ -1169,7 +1168,7 @@ public final class InternalTestCluster extends TestCluster { assert newSize == numSharedDedicatedMasterNodes + numSharedDataNodes + numSharedCoordOnlyNodes; final int numberOfMasterNodes = numSharedDedicatedMasterNodes > 0 ? 
numSharedDedicatedMasterNodes : numSharedDataNodes; final int defaultMinMasterNodes = (numberOfMasterNodes / 2) + 1; - final List toStartAndPublish = new ArrayList<>(); // we want to start nodes in one go due to min master nodes + final List toStartAndPublish = new ArrayList<>(); // we want to start nodes in one go final Runnable onTransportServiceStarted = () -> rebuildUnicastHostFiles(toStartAndPublish); final List settings = new ArrayList<>(); @@ -1202,7 +1201,7 @@ public final class InternalTestCluster extends TestCluster { .map(Node.NODE_NAME_SETTING::get) .collect(Collectors.toList()); - if (prevNodeCount == 0 && autoManageMinMasterNodes) { + if (prevNodeCount == 0 && autoManageMasterNodes) { if (numSharedDedicatedMasterNodes > 0) { autoBootstrapMasterNodeIndex = RandomNumbers.randomIntBetween(random, 0, numSharedDedicatedMasterNodes - 1); } else if (numSharedDataNodes > 0) { @@ -1225,7 +1224,7 @@ public final class InternalTestCluster extends TestCluster { nextNodeId.set(newSize); assert size() == newSize; - if (autoManageMinMasterNodes && newSize > 0) { + if (autoManageMasterNodes && newSize > 0) { validateClusterFormed(); } logger.debug("Cluster is consistent again - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", @@ -1670,7 +1669,7 @@ public final class InternalTestCluster extends TestCluster { .filter(nac -> nodes.containsKey(nac.name) == false) // filter out old masters .count(); final int currentMasters = getMasterNodesCount(); - if (autoManageMinMasterNodes && currentMasters > 0 && newMasters > 0 && + if (autoManageMasterNodes && currentMasters > 0 && newMasters > 0 && getMinMasterNodes(currentMasters + newMasters) <= currentMasters) { // if we're adding too many master-eligible nodes at once, we can't update the min master setting before adding the nodes. updateMinMasterNodes(currentMasters + newMasters); @@ -1689,7 +1688,7 @@ public final class InternalTestCluster extends TestCluster { } nodeAndClients.forEach(this::publishNode); - if (autoManageMinMasterNodes && currentMasters > 0 && newMasters > 0 && + if (autoManageMasterNodes && currentMasters > 0 && newMasters > 0 && getMinMasterNodes(currentMasters + newMasters) > currentMasters) { // update once masters have joined validateClusterFormed(); @@ -1802,7 +1801,7 @@ public final class InternalTestCluster extends TestCluster { Set excludedNodeIds = excludeMasters(Collections.singleton(nodeAndClient)); final Settings newSettings = nodeAndClient.closeForRestart(callback, - autoManageMinMasterNodes ? getMinMasterNodes(getMasterNodesCount()) : -1); + autoManageMasterNodes ? getMinMasterNodes(getMasterNodesCount()) : -1); removeExclusions(excludedNodeIds); @@ -1822,10 +1821,8 @@ public final class InternalTestCluster extends TestCluster { } if (callback.validateClusterForming() || excludedNodeIds.isEmpty() == false) { - // we have to validate cluster size if updateMinMaster == true, because we need the - // second node to join in order to increment min_master_nodes back to 2. 
- // we also have to do via the node that was just restarted as it may be that the master didn't yet process - // the fact it left + // we have to validate cluster size to ensure that the restarted node has rejoined the cluster if it was master-eligible; + // we have to do this via the node that was just restarted as it may be that the master didn't yet process the fact that it left validateClusterFormed(nodeAndClient.name); } @@ -1845,7 +1842,7 @@ public final class InternalTestCluster extends TestCluster { private Set excludeMasters(Collection nodeAndClients) { assert Thread.holdsLock(this); final Set excludedNodeIds = new HashSet<>(); - if (autoManageMinMasterNodes && nodeAndClients.size() > 0) { + if (autoManageMasterNodes && nodeAndClients.size() > 0) { final long currentMasters = nodes.values().stream().filter(NodeAndClient::isMasterEligible).count(); final long stoppingMasters = nodeAndClients.stream().filter(NodeAndClient::isMasterEligible).count(); @@ -1896,7 +1893,7 @@ public final class InternalTestCluster extends TestCluster { final Settings[] newNodeSettings = new Settings[nextNodeId.get()]; Map, List> nodesByRoles = new HashMap<>(); Set[] rolesOrderedByOriginalStartupOrder = new Set[nextNodeId.get()]; - final int minMasterNodes = autoManageMinMasterNodes ? getMinMasterNodes(getMasterNodesCount()) : -1; + final int minMasterNodes = autoManageMasterNodes ? getMinMasterNodes(getMasterNodesCount()) : -1; for (NodeAndClient nodeAndClient : nodes.values()) { callback.doAfterNodes(numNodesRestarted++, nodeAndClient.nodeClient()); logger.info("Stopping and resetting node [{}] ", nodeAndClient.name); @@ -2090,7 +2087,7 @@ public final class InternalTestCluster extends TestCluster { public synchronized List startNodes(Settings... extraSettings) { final int newMasterCount = Math.toIntExact(Stream.of(extraSettings).filter(Node.NODE_MASTER_SETTING::get).count()); final int defaultMinMasterNodes; - if (autoManageMinMasterNodes) { + if (autoManageMasterNodes) { defaultMinMasterNodes = getMinMasterNodes(getMasterNodesCount() + newMasterCount); } else { defaultMinMasterNodes = -1; @@ -2098,7 +2095,7 @@ public final class InternalTestCluster extends TestCluster { final List nodes = new ArrayList<>(); final int prevMasterCount = getMasterNodesCount(); int autoBootstrapMasterNodeIndex = - prevMasterCount == 0 && autoManageMinMasterNodes && newMasterCount > 0 && Arrays.stream(extraSettings) + prevMasterCount == 0 && autoManageMasterNodes && newMasterCount > 0 && Arrays.stream(extraSettings) .allMatch(s -> Node.NODE_MASTER_SETTING.get(s) == false || ZEN2_DISCOVERY_TYPE.equals(DISCOVERY_TYPE_SETTING.get(s))) ? 
RandomNumbers.randomIntBetween(random, 0, newMasterCount - 1) : -1; @@ -2133,7 +2130,7 @@ public final class InternalTestCluster extends TestCluster { nodes.add(nodeAndClient); } startAndPublishNodesAndClients(nodes); - if (autoManageMinMasterNodes) { + if (autoManageMasterNodes) { validateClusterFormed(); } return nodes.stream().map(NodeAndClient::getName).collect(Collectors.toList()); @@ -2165,7 +2162,7 @@ public final class InternalTestCluster extends TestCluster { * @param eligibleMasterNodeCount the number of master eligible nodes to use as basis for the min master node setting */ private void updateMinMasterNodes(int eligibleMasterNodeCount) { - assert autoManageMinMasterNodes; + assert autoManageMasterNodes; final int minMasterNodes = getMinMasterNodes(eligibleMasterNodeCount); if (getMasterNodesCount() > 0) { // there should be at least one master to update diff --git a/test/framework/src/test/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java b/test/framework/src/test/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java index a2f6b3ed654..a018bc16631 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java +++ b/test/framework/src/test/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java @@ -37,7 +37,7 @@ import java.util.Set; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoMinMasterNodes = false) +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class NetworkDisruptionIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { From 9a9ee9abedbfcaf49fd134349c60e11ab49f0859 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 3 Jun 2019 13:22:10 +0200 Subject: [PATCH 034/210] [Docs] Add example to reimplement stempel analyzer (#42676) Adding an example of how to re-implement the polish stempel analyzer in case a user want to modify or extend it. In order for the analyzer to be able to use polish stopwords, also registering a polish_stop filter for the stempel plugin. Closes #13150 --- docs/plugins/analysis-stempel.asciidoc | 104 +++++++++++++++++- .../pl/PolishStopTokenFilterFactory.java | 73 ++++++++++++ .../stempel/AnalysisStempelPlugin.java | 4 +- 3 files changed, 178 insertions(+), 3 deletions(-) create mode 100644 plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStopTokenFilterFactory.java diff --git a/docs/plugins/analysis-stempel.asciidoc b/docs/plugins/analysis-stempel.asciidoc index a5526129a97..cd234c8d427 100644 --- a/docs/plugins/analysis-stempel.asciidoc +++ b/docs/plugins/analysis-stempel.asciidoc @@ -12,7 +12,107 @@ include::install_remove.asciidoc[] [[analysis-stempel-tokenizer]] [float] -==== `stempel` tokenizer and token filter +==== `stempel` tokenizer and token filters -The plugin provides the `polish` analyzer and `polish_stem` token filter, +The plugin provides the `polish` analyzer and the `polish_stem` and `polish_stop` token filters, which are not configurable. 
+ +==== Reimplementing and extending the analyzers + +The `polish` analyzer could be reimplemented as a `custom` analyzer that can +then be extended and configured differently as follows: + +[source,js] +---------------------------------------------------- +PUT /stempel_example +{ + "settings": { + "analysis": { + "analyzer": { + "rebuilt_stempel": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "polish_stop", + "polish_stem" + ] + } + } + } + } +} +---------------------------------------------------- +// CONSOLE +// TEST[s/\n$/\nstartyaml\n - compare_analyzers: {index: stempel_example, first: polish, second: rebuilt_stempel}\nendyaml\n/] + +[[analysis-polish-stop]] +==== `polish_stop` token filter + +The `polish_stop` token filter filters out Polish stopwords (`_polish_`), and +any other custom stopwords specified by the user. This filter only supports +the predefined `_polish_` stopwords list. If you want to use a different +predefined list, then use the +{ref}/analysis-stop-tokenfilter.html[`stop` token filter] instead. + +[source,js] +-------------------------------------------------- +PUT /polish_stop_example +{ + "settings": { + "index": { + "analysis": { + "analyzer": { + "analyzer_with_stop": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "polish_stop" + ] + } + }, + "filter": { + "polish_stop": { + "type": "polish_stop", + "stopwords": [ + "_polish_", + "jeść" + ] + } + } + } + } + } +} + +GET polish_stop_example/_analyze +{ + "analyzer": "analyzer_with_stop", + "text": "Gdzie kucharek sześć, tam nie ma co jeść." +} +-------------------------------------------------- +// CONSOLE + +The above request returns: + +[source,js] +-------------------------------------------------- +{ + "tokens" : [ + { + "token" : "kucharek", + "start_offset" : 6, + "end_offset" : 14, + "type" : "", + "position" : 1 + }, + { + "token" : "sześć", + "start_offset" : 15, + "end_offset" : 20, + "type" : "", + "position" : 2 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStopTokenFilterFactory.java b/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStopTokenFilterFactory.java new file mode 100644 index 00000000000..32897ad29d7 --- /dev/null +++ b/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStopTokenFilterFactory.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis.pl; + + +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.pl.PolishAnalyzer; +import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; +import org.elasticsearch.index.analysis.Analysis; + +import java.util.Map; +import java.util.Set; + +import static java.util.Collections.singletonMap; + +public class PolishStopTokenFilterFactory extends AbstractTokenFilterFactory { + private static final Map> NAMED_STOP_WORDS = singletonMap("_polish_", PolishAnalyzer.getDefaultStopSet()); + + private final CharArraySet stopWords; + + private final boolean ignoreCase; + + private final boolean removeTrailing; + + public PolishStopTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { + super(indexSettings, name, settings); + this.ignoreCase = settings.getAsBoolean("ignore_case", false); + this.removeTrailing = settings.getAsBoolean("remove_trailing", true); + this.stopWords = Analysis.parseWords(env, settings, "stopwords", + PolishAnalyzer.getDefaultStopSet(), NAMED_STOP_WORDS, ignoreCase); + } + + @Override + public TokenStream create(TokenStream tokenStream) { + if (removeTrailing) { + return new StopFilter(tokenStream, stopWords); + } else { + return new SuggestStopFilter(tokenStream, stopWords); + } + } + + public Set stopWords() { + return stopWords; + } + + public boolean ignoreCase() { + return ignoreCase; + } + +} diff --git a/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java b/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java index 98dd9634fb9..a523d7dcaa0 100644 --- a/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java +++ b/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.pl.PolishAnalyzerProvider; import org.elasticsearch.index.analysis.pl.PolishStemTokenFilterFactory; +import org.elasticsearch.index.analysis.pl.PolishStopTokenFilterFactory; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; @@ -35,7 +36,8 @@ import static java.util.Collections.singletonMap; public class AnalysisStempelPlugin extends Plugin implements AnalysisPlugin { @Override public Map> getTokenFilters() { - return singletonMap("polish_stem", PolishStemTokenFilterFactory::new); + return Map.of("polish_stem", PolishStemTokenFilterFactory::new, + "polish_stop", PolishStopTokenFilterFactory::new); } @Override From bde0137e071e7b4b5684f3a6018098989a4efd64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 3 Jun 2019 13:36:20 +0200 Subject: [PATCH 035/210] Fix compile issue of earlier commit on 7.x --- .../plugin/analysis/stempel/AnalysisStempelPlugin.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git 
a/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java b/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java index a523d7dcaa0..1ba78e75c7c 100644 --- a/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java +++ b/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java @@ -29,6 +29,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; +import java.util.HashMap; import java.util.Map; import static java.util.Collections.singletonMap; @@ -36,8 +37,10 @@ import static java.util.Collections.singletonMap; public class AnalysisStempelPlugin extends Plugin implements AnalysisPlugin { @Override public Map> getTokenFilters() { - return Map.of("polish_stem", PolishStemTokenFilterFactory::new, - "polish_stop", PolishStopTokenFilterFactory::new); + Map> filters = new HashMap<>(); + filters.put("polish_stem", PolishStemTokenFilterFactory::new); + filters.put("polish_stop", PolishStopTokenFilterFactory::new); + return filters; } @Override From 10aca87389b1260238e612474921788f79581076 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Mon, 3 Jun 2019 12:18:14 +0100 Subject: [PATCH 036/210] [ML] Better detection of binary input in find_file_structure (#42707) This change helps to prevent the situation where a binary file uploaded to the find_file_structure endpoint is detected as being text in the UTF-16 character set, and then causes a large amount of CPU to be spent analysing the bogus text structure. The approach is to check the distribution of zero bytes between odd and even file positions, on the grounds that UTF-16BE or UTF16-LE would have a very skewed distribution. 
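A minimal, self-contained sketch of that odd/even zero-byte heuristic is shown below; the class and method names are illustrative rather than the production code, and the actual implementation (including the explanation messages it records) follows in the diff.

[source,java]
--------------------------------------------------
// Sketch of the heuristic: genuine UTF-16BE or UTF-16LE text concentrates its zero bytes
// at even or odd offsets respectively, so a near-even split between the two positions
// suggests arbitrary binary data rather than UTF-16 text.
public final class Utf16ZeroByteHeuristic {

    static boolean zeroBytesLookLikeUtf16(byte[] sample) {
        int evenPosZeroCount = 0;
        int oddPosZeroCount = 0;
        for (int i = 0; i < sample.length; ++i) {
            if (sample[i] == 0) {
                if (i % 2 == 0) {
                    ++evenPosZeroCount;
                } else {
                    ++oddPosZeroCount;
                }
            }
        }
        if (evenPosZeroCount == 0 && oddPosZeroCount == 0) {
            return false; // no zero bytes at all, so nothing points towards UTF-16
        }
        // Reject the UTF-16 guess when the two counts are within roughly 1.5x of each other,
        // mirroring the 3:2 ratio comparison used by the change below.
        boolean countsAreClose = 3 * oddPosZeroCount > 2 * evenPosZeroCount
            && 3 * evenPosZeroCount > 2 * oddPosZeroCount;
        return countsAreClose == false;
    }
}
--------------------------------------------------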
--- .../FileStructureFinderManager.java | 19 +++++++++--- .../FileStructureFinderManagerTests.java | 29 ++++++++++++++++++- 2 files changed, 43 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java index 5332f18e9f0..4f26276c3df 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java @@ -175,7 +175,8 @@ public final class FileStructureFinderManager { // Determine some extra characteristics of the input to compensate for some deficiencies of ICU4J boolean pureAscii = true; - boolean containsZeroBytes = false; + int evenPosZeroCount = 0; + int oddPosZeroCount = 0; inputStream.mark(BUFFER_SIZE); byte[] workspace = new byte[BUFFER_SIZE]; int remainingLength = BUFFER_SIZE; @@ -184,17 +185,22 @@ public final class FileStructureFinderManager { if (bytesRead <= 0) { break; } - for (int i = 0; i < bytesRead && containsZeroBytes == false; ++i) { + for (int i = 0; i < bytesRead; ++i) { if (workspace[i] == 0) { - containsZeroBytes = true; pureAscii = false; + if (i % 2 == 0) { + ++evenPosZeroCount; + } else { + ++oddPosZeroCount; + } } else { pureAscii = pureAscii && workspace[i] > 0 && workspace[i] < 128; } } remainingLength -= bytesRead; - } while (containsZeroBytes == false && remainingLength > 0); + } while (remainingLength > 0); inputStream.reset(); + boolean containsZeroBytes = evenPosZeroCount > 0 || oddPosZeroCount > 0; timeoutChecker.check("character set detection"); if (pureAscii) { @@ -235,6 +241,11 @@ public final class FileStructureFinderManager { if (containsZeroBytes && spaceEncodingContainsZeroByte == false) { explanation.add("Character encoding [" + name + "] matched the input with [" + charsetMatch.getConfidence() + "%] confidence but was rejected as the input contains zero bytes and the [" + name + "] encoding does not"); + } else if (containsZeroBytes && 3 * oddPosZeroCount > 2 * evenPosZeroCount && 3 * evenPosZeroCount > 2 * oddPosZeroCount) { + explanation.add("Character encoding [" + name + "] matched the input with [" + charsetMatch.getConfidence() + + "%] confidence but was rejected as the distribution of zero bytes between odd and even positions in the " + + "file is very close - [" + evenPosZeroCount + "] and [" + oddPosZeroCount + "] in the first [" + + (BUFFER_SIZE / 1024) + "kB] of input"); } else { explanation.add("Using character encoding [" + name + "], which matched the input with [" + charsetMatch.getConfidence() + "%] confidence"); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java index 978f1c5286d..f68d8edc612 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java @@ -14,6 +14,7 @@ import org.junit.After; import org.junit.Before; import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.PipedInputStream; @@ 
-25,6 +26,7 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.ml.filestructurefinder.FileStructureOverrides.EMPTY_OVERRIDES; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.IsInstanceOf.instanceOf; @@ -54,7 +56,7 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { } } - public void testFindCharsetGivenBinary() throws Exception { + public void testFindCharsetGivenRandomBinary() throws Exception { // This input should never match a single byte character set. ICU4J will sometimes decide // that it matches a double byte character set, hence the two assertion branches. @@ -73,6 +75,31 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { } } + public void testFindCharsetGivenBinaryNearUtf16() throws Exception { + + // This input should never match a single byte character set. ICU4J will probably decide + // that it matches both UTF-16BE and UTF-16LE, but we should reject these as there's no + // clear winner. + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + if (randomBoolean()) { + stream.write(randomAlphaOfLengthBetween(3, 4).getBytes(StandardCharsets.UTF_16LE)); + } + for (int i = 0; i < 50; ++i) { + stream.write(randomAlphaOfLengthBetween(5, 6).getBytes(StandardCharsets.UTF_16BE)); + stream.write(randomAlphaOfLengthBetween(5, 6).getBytes(StandardCharsets.UTF_16LE)); + } + if (randomBoolean()) { + stream.write(randomAlphaOfLengthBetween(3, 4).getBytes(StandardCharsets.UTF_16BE)); + } + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> structureFinderManager.findCharset(explanation, new ByteArrayInputStream(stream.toByteArray()), NOOP_TIMEOUT_CHECKER)); + + assertEquals("Could not determine a usable character encoding for the input - could it be binary data?", e.getMessage()); + assertThat(explanation.toString(), + containsString("but was rejected as the distribution of zero bytes between odd and even positions in the file is very close")); + } + public void testMakeBestStructureGivenNdJson() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, NDJSON_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), instanceOf(NdJsonFileStructureFinder.class)); From 0253927ec45c9f3b3e0f4236f67f5e8e4426ce41 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 3 Jun 2019 06:53:44 -0500 Subject: [PATCH 037/210] [ML Data Frame] Refactor stop logic (#42644) (#42763) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Revert "invalid test" This reverts commit 9dd8b52c13c716918ff97e6527aaf43aefc4695d. * Testing * mend * Revert "[ML Data Frame] Mute Data Frame tests" This reverts commit 5d837fa312b0e41a77a65462667a2d92d1114567. * Call onStop and onAbort outside atomic update * Don’t update CS * Tidying up * Remove invalid test that asserted logic that has been removed * Add stopped event * Revert "Add stopped event" This reverts commit 02ba992f4818bebd838e1c7678bd2e1cc090bfab. 
* Adding check for STOPPED in saveState --- .../core/indexing/AsyncTwoPhaseIndexer.java | 29 ++++---- .../indexing/AsyncTwoPhaseIndexerTests.java | 19 ------ .../integration/DataFrameTransformIT.java | 1 - .../integration/DataFrameAuditorIT.java | 2 - .../DataFrameConfigurationIndexIT.java | 2 - .../DataFrameGetAndGetStatsIT.java | 2 - .../integration/DataFrameMetaDataIT.java | 2 - .../integration/DataFramePivotRestIT.java | 2 - .../DataFrameTaskFailedStateIT.java | 2 - .../integration/DataFrameUsageIT.java | 2 - .../transforms/DataFrameTransformTask.java | 66 ++++++++----------- .../test/data_frame/transforms_start_stop.yml | 45 +++++++++++++ 12 files changed, 87 insertions(+), 87 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java index 0c4477b6b70..f9bbf890fe6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java @@ -90,28 +90,21 @@ public abstract class AsyncTwoPhaseIndexer { + return state.updateAndGet(previousState -> { if (previousState == IndexerState.INDEXING) { return IndexerState.STOPPING; } else if (previousState == IndexerState.STARTED) { - wasStartedAndSetStopped.set(true); return IndexerState.STOPPED; } else { return previousState; } }); - - if (wasStartedAndSetStopped.get()) { - onStop(); - } - return currentState; } /** @@ -288,20 +281,22 @@ public abstract class AsyncTwoPhaseIndexer { + AtomicBoolean callOnStop = new AtomicBoolean(false); + AtomicBoolean callOnAbort = new AtomicBoolean(false); + IndexerState updatedState = state.updateAndGet(prev -> { switch (prev) { case INDEXING: // ready for another job return IndexerState.STARTED; case STOPPING: + callOnStop.set(true); // must be started again - onStop(); return IndexerState.STOPPED; case ABORTING: + callOnAbort.set(true); // abort and exit - onAbort(); return IndexerState.ABORTING; // This shouldn't matter, since onAbort() will kill the task first case STOPPED: @@ -316,6 +311,14 @@ public abstract class AsyncTwoPhaseIndexer state = new AtomicReference<>(IndexerState.STOPPED); - final ExecutorService executor = Executors.newFixedThreadPool(1); - try { - CountDownLatch countDownLatch = new CountDownLatch(1); - MockIndexer indexer = new MockIndexer(executor, state, 2, countDownLatch, false); - indexer.start(); - assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); - countDownLatch.countDown(); - assertTrue(awaitBusy(() -> isFinished.get())); - - indexer.stop(); - assertTrue(isStopped.get()); - assertThat(indexer.getState(), equalTo(IndexerState.STOPPED)); - } finally { - executor.shutdownNow(); - } - } - public void testStop_WhileIndexing() throws InterruptedException { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); final ExecutorService executor = Executors.newFixedThreadPool(1); diff --git a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java index 1ec425c6416..69fb980871d 100644 --- a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java +++ 
b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java @@ -30,7 +30,6 @@ public class DataFrameTransformIT extends DataFrameIntegTestCase { cleanUp(); } - @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42344") public void testDataFrameTransformCrud() throws Exception { createReviewsIndex(); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java index 7dc79c1ae8f..9884c9bb679 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.dataframe.integration; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; import org.junit.Before; @@ -23,7 +22,6 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; -@LuceneTestCase.AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42344") public class DataFrameAuditorIT extends DataFrameRestTestCase { private static final String TEST_USER_NAME = "df_admin_plus_data"; diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java index d7e12cf2bee..681599331c8 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.dataframe.integration; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; @@ -23,7 +22,6 @@ import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -@LuceneTestCase.AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42344") public class DataFrameConfigurationIndexIT extends DataFrameRestTestCase { /** diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java index 9bac6ca0b40..d9927cd09ed 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java @@ -6,7 +6,6 @@ package 
org.elasticsearch.xpack.dataframe.integration; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.dataframe.DataFrameField; @@ -22,7 +21,6 @@ import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswo import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -@LuceneTestCase.AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42344") public class DataFrameGetAndGetStatsIT extends DataFrameRestTestCase { private static final String TEST_USER_NAME = "df_user"; diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameMetaDataIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameMetaDataIT.java index 5b95d1daead..26a957ea055 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameMetaDataIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameMetaDataIT.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.dataframe.integration; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -16,7 +15,6 @@ import org.junit.Before; import java.io.IOException; import java.util.Map; -@LuceneTestCase.AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42344") public class DataFrameMetaDataIT extends DataFrameRestTestCase { private boolean indicesCreated = false; diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index 36f95e599ff..933fcc6c8e5 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.dataframe.integration; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.junit.Before; @@ -22,7 +21,6 @@ import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswo import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -@LuceneTestCase.AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42344") public class DataFramePivotRestIT extends DataFrameRestTestCase { private static final String TEST_USER_NAME = "df_admin_plus_data"; diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTaskFailedStateIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTaskFailedStateIT.java index 7b63644dd34..96aeeda8755 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTaskFailedStateIT.java +++ 
b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTaskFailedStateIT.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.dataframe.integration; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; @@ -20,7 +19,6 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; -@LuceneTestCase.AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42344") public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase { public void testDummy() { diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java index f98fa6a2713..4f209c5a9f3 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.dataframe.integration; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -23,7 +22,6 @@ import java.util.Map; import static org.elasticsearch.xpack.core.dataframe.DataFrameField.INDEX_DOC_TYPE; import static org.elasticsearch.xpack.dataframe.DataFrameFeatureSet.PROVIDED_STATS; -@LuceneTestCase.AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42344") public class DataFrameUsageIT extends DataFrameRestTestCase { private boolean indicesCreated = false; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java index 13deab6748c..575cd4c15bd 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java @@ -66,7 +66,7 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S private final Map initialPosition; private final IndexerState initialIndexerState; - private final SetOnce indexer = new SetOnce<>(); + private final SetOnce indexer = new SetOnce<>(); private final AtomicReference taskState; private final AtomicReference stateReason; @@ -125,7 +125,7 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S return getState(); } - private DataFrameIndexer getIndexer() { + private ClientDataFrameIndexer getIndexer() { return indexer.get(); } @@ -236,7 +236,10 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S return; } - getIndexer().stop(); + IndexerState state = getIndexer().stop(); + if (state == IndexerState.STOPPED) { + getIndexer().doSaveState(state, getIndexer().getPosition(), () -> getIndexer().onStop()); + } } @Override @@ -530,11 +533,17 @@ public class DataFrameTransformTask extends AllocatedPersistentTask 
implements S next.run(); return; } + // If we are `STOPPED` on a `doSaveState` call, that indicates we transitioned to `STOPPED` from `STOPPING` + // OR we called `doSaveState` manually as the indexer was not actively running. + // Since we save the state to an index, we should make sure that our task state is in parity with the indexer state + if (indexerState.equals(IndexerState.STOPPED)) { + transformTask.setTaskStateStopped(); + } final DataFrameTransformState state = new DataFrameTransformState( transformTask.taskState.get(), indexerState, - getPosition(), + position, transformTask.currentCheckpoint.get(), transformTask.stateReason.get(), getProgress()); @@ -542,28 +551,18 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S // Persisting stats when we call `doSaveState` should be ok as we only call it on a state transition and // only every-so-often when doing the bulk indexing calls. See AsyncTwoPhaseIndexer#onBulkResponse for current periodicity - ActionListener> updateClusterStateListener = ActionListener.wrap( - task -> { - transformsConfigManager.putOrUpdateTransformStats( - new DataFrameTransformStateAndStats(transformId, state, getStats(), - DataFrameTransformCheckpointingInfo.EMPTY), // TODO should this be null - ActionListener.wrap( - r -> { - next.run(); - }, - statsExc -> { - logger.error("Updating stats of transform [" + transformConfig.getId() + "] failed", statsExc); - next.run(); - } - )); - }, - exc -> { - logger.error("Updating persistent state of transform [" + transformConfig.getId() + "] failed", exc); - next.run(); - } - ); - - transformTask.persistStateToClusterState(state, updateClusterStateListener); + transformsConfigManager.putOrUpdateTransformStats( + new DataFrameTransformStateAndStats(transformId, state, getStats(), + DataFrameTransformCheckpointingInfo.EMPTY), // TODO should this be null + ActionListener.wrap( + r -> { + next.run(); + }, + statsExc -> { + logger.error("Updating stats of transform [" + transformConfig.getId() + "] failed", statsExc); + next.run(); + } + )); } @Override @@ -602,20 +601,7 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S protected void onStop() { auditor.info(transformConfig.getId(), "Indexer has stopped"); logger.info("Data frame transform [{}] indexer has stopped", transformConfig.getId()); - - transformTask.setTaskStateStopped(); - transformsConfigManager.putOrUpdateTransformStats( - new DataFrameTransformStateAndStats(transformId, transformTask.getState(), getStats(), - DataFrameTransformCheckpointingInfo.EMPTY), // TODO should this be null - ActionListener.wrap( - r -> { - transformTask.shutdown(); - }, - statsExc -> { - transformTask.shutdown(); - logger.error("Updating saving stats of transform [" + transformConfig.getId() + "] failed", statsExc); - } - )); + transformTask.shutdown(); } @Override diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml index 31f80033e7b..2686c57fd06 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml @@ -190,8 +190,10 @@ teardown: - do: data_frame.stop_data_frame_transform: transform_id: "airline-transform-start-stop" + wait_for_completion: true - match: { acknowledged: true } + - do: data_frame.get_data_frame_transform_stats: transform_id: 
"airline-transform-start-later" @@ -209,3 +211,46 @@ teardown: - do: data_frame.delete_data_frame_transform: transform_id: "airline-transform-start-later" + +--- +"Test stop all": + - do: + data_frame.put_data_frame_transform: + transform_id: "airline-transform-stop-all" + body: > + { + "source": { "index": "airline-data" }, + "dest": { "index": "airline-data-start-later" }, + "pivot": { + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} + } + } + - do: + data_frame.start_data_frame_transform: + transform_id: "airline-transform-stop-all" + - match: { acknowledged: true } + + - do: + data_frame.start_data_frame_transform: + transform_id: "airline-transform-start-stop" + - match: { acknowledged: true } + + - do: + data_frame.stop_data_frame_transform: + transform_id: "_all" + wait_for_completion: true + - match: { acknowledged: true } + + - do: + data_frame.get_data_frame_transform_stats: + transform_id: "*" + - match: { count: 2 } + - match: { transforms.0.state.indexer_state: "stopped" } + - match: { transforms.0.state.task_state: "stopped" } + - match: { transforms.1.state.indexer_state: "stopped" } + - match: { transforms.1.state.task_state: "stopped" } + + - do: + data_frame.delete_data_frame_transform: + transform_id: "airline-transform-stop-all" From b61202b0a8508e5a52a164036dc9c3ec862e1557 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Mon, 3 Jun 2019 13:44:06 +0100 Subject: [PATCH 038/210] [ML] Add a limit on line merging in find_file_structure (#42501) When analysing a semi-structured text file the find_file_structure endpoint merges lines to form multi-line messages using the assumption that the first line in each message contains the timestamp. However, if the timestamp is misdetected then this can lead to excessive numbers of lines being merged to form massive messages. This commit adds a line_merge_size_limit setting (default 10000 characters) that halts the analysis if a message bigger than this is created. This prevents significant CPU time being spent subsequently trying to determine the internal structure of the huge bogus messages. 
--- .../client/ml/FindFileStructureRequest.java | 18 +++++++- .../ml/FindFileStructureRequestTests.java | 4 ++ .../ml/apis/find-file-structure.asciidoc | 7 +++ .../ml/action/FindFileStructureAction.java | 26 ++++++++++- .../FindFileStructureActionRequestTests.java | 16 +++++++ .../TransportFindFileStructureAction.java | 2 +- .../DelimitedFileStructureFinderFactory.java | 3 +- .../FileStructureFinderFactory.java | 4 +- .../FileStructureFinderManager.java | 44 +++++++++++-------- .../NdJsonFileStructureFinderFactory.java | 3 +- .../TextLogFileStructureFinder.java | 15 ++++++- .../TextLogFileStructureFinderFactory.java | 4 +- .../XmlFileStructureFinderFactory.java | 2 +- .../ml/rest/RestFindFileStructureAction.java | 2 + .../DelimitedFileStructureFinderTests.java | 26 +++++------ .../FileStructureFinderManagerTests.java | 26 +++++++---- .../NdJsonFileStructureFinderTests.java | 2 +- .../TextLogFileStructureFinderTests.java | 44 ++++++++++++++----- .../XmlFileStructureFinderTests.java | 2 +- .../api/ml.find_file_structure.json | 5 +++ .../test/ml/find_file_structure.yml | 1 + 21 files changed, 191 insertions(+), 65 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FindFileStructureRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FindFileStructureRequest.java index adfee92bd61..fed417e9582 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FindFileStructureRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FindFileStructureRequest.java @@ -37,6 +37,7 @@ import java.util.Optional; public class FindFileStructureRequest implements Validatable, ToXContentFragment { public static final ParseField LINES_TO_SAMPLE = new ParseField("lines_to_sample"); + public static final ParseField LINE_MERGE_SIZE_LIMIT = new ParseField("line_merge_size_limit"); public static final ParseField TIMEOUT = new ParseField("timeout"); public static final ParseField CHARSET = FileStructure.CHARSET; public static final ParseField FORMAT = FileStructure.FORMAT; @@ -52,6 +53,7 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment public static final ParseField EXPLAIN = new ParseField("explain"); private Integer linesToSample; + private Integer lineMergeSizeLimit; private TimeValue timeout; private String charset; private FileStructure.Format format; @@ -77,6 +79,14 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment this.linesToSample = linesToSample; } + public Integer getLineMergeSizeLimit() { + return lineMergeSizeLimit; + } + + public void setLineMergeSizeLimit(Integer lineMergeSizeLimit) { + this.lineMergeSizeLimit = lineMergeSizeLimit; + } + public TimeValue getTimeout() { return timeout; } @@ -228,6 +238,9 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment if (linesToSample != null) { builder.field(LINES_TO_SAMPLE.getPreferredName(), linesToSample); } + if (lineMergeSizeLimit != null) { + builder.field(LINE_MERGE_SIZE_LIMIT.getPreferredName(), lineMergeSizeLimit); + } if (timeout != null) { builder.field(TIMEOUT.getPreferredName(), timeout); } @@ -270,8 +283,8 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment @Override public int hashCode() { - return Objects.hash(linesToSample, timeout, charset, format, columnNames, hasHeaderRow, delimiter, grokPattern, timestampFormat, - timestampField, explain, sample); + return Objects.hash(linesToSample, lineMergeSizeLimit, 
timeout, charset, format, columnNames, hasHeaderRow, delimiter, grokPattern, + timestampFormat, timestampField, explain, sample); } @Override @@ -287,6 +300,7 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment FindFileStructureRequest that = (FindFileStructureRequest) other; return Objects.equals(this.linesToSample, that.linesToSample) && + Objects.equals(this.lineMergeSizeLimit, that.lineMergeSizeLimit) && Objects.equals(this.timeout, that.timeout) && Objects.equals(this.charset, that.charset) && Objects.equals(this.format, that.format) && diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FindFileStructureRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FindFileStructureRequestTests.java index 4cb8bf0a7c1..752d0593bef 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FindFileStructureRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FindFileStructureRequestTests.java @@ -35,6 +35,7 @@ public class FindFileStructureRequestTests extends AbstractXContentTestCase p.setTimeout(TimeValue.parseTimeValue(c, FindFileStructureRequest.TIMEOUT.getPreferredName())), FindFileStructureRequest.TIMEOUT); PARSER.declareString(FindFileStructureRequest::setCharset, FindFileStructureRequest.CHARSET); @@ -72,6 +73,9 @@ public class FindFileStructureRequestTests extends AbstractXContentTestCase explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws IOException { + int lineMergeSizeLimit, FileStructureOverrides overrides, TimeoutChecker timeoutChecker) + throws IOException { return DelimitedFileStructureFinder.makeDelimitedFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, csvPreference, trimFields, overrides, timeoutChecker); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java index 8790b8f5268..45edf96ce56 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java @@ -37,6 +37,7 @@ public interface FileStructureFinderFactory { * @param sample A sample from the file to be ingested. * @param charsetName The name of the character set in which the sample was provided. * @param hasByteOrderMarker Did the sample have a byte order marker? null means "not relevant". + * @param lineMergeSizeLimit Maximum number of characters permitted when lines are merged to create messages. * @param overrides Stores structure decisions that have been made by the end user, and should * take precedence over anything the {@link FileStructureFinder} may decide. * @param timeoutChecker Will abort the operation if its timeout is exceeded. @@ -44,5 +45,6 @@ public interface FileStructureFinderFactory { * @throws Exception if something goes wrong during creation. 
*/ FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws Exception; + int lineMergeSizeLimit, FileStructureOverrides overrides, + TimeoutChecker timeoutChecker) throws Exception; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java index 4f26276c3df..2fa8d1bb6d6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java @@ -43,6 +43,7 @@ public final class FileStructureFinderManager { public static final int MIN_SAMPLE_LINE_COUNT = 2; public static final int DEFAULT_IDEAL_SAMPLE_LINE_COUNT = 1000; + public static final int DEFAULT_LINE_MERGE_SIZE_LIMIT = 10000; static final Set FILEBEAT_SUPPORTED_ENCODINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( "866", "ansi_x3.4-1968", "arabic", "ascii", "asmo-708", "big5", "big5-hkscs", "chinese", "cn-big5", "cp1250", "cp1251", "cp1252", @@ -96,8 +97,9 @@ public final class FileStructureFinderManager { this.scheduler = Objects.requireNonNull(scheduler); } - public FileStructureFinder findFileStructure(Integer idealSampleLineCount, InputStream fromFile) throws Exception { - return findFileStructure(idealSampleLineCount, fromFile, FileStructureOverrides.EMPTY_OVERRIDES, null); + public FileStructureFinder findFileStructure(Integer idealSampleLineCount, Integer lineMergeSizeLimit, + InputStream fromFile) throws Exception { + return findFileStructure(idealSampleLineCount, lineMergeSizeLimit, fromFile, FileStructureOverrides.EMPTY_OVERRIDES, null); } /** @@ -106,6 +108,8 @@ public final class FileStructureFinderManager { * If the stream has fewer lines then an attempt will still be made, providing at * least {@link #MIN_SAMPLE_LINE_COUNT} lines can be read. If null * the value of {@link #DEFAULT_IDEAL_SAMPLE_LINE_COUNT} will be used. + * @param lineMergeSizeLimit Maximum number of characters permitted when lines are merged to create messages. + * If null the value of {@link #DEFAULT_LINE_MERGE_SIZE_LIMIT} will be used. * @param fromFile A stream from which the sample will be read. * @param overrides Aspects of the file structure that are known in advance. These take precedence over * values determined by structure analysis. An exception will be thrown if the file structure @@ -116,20 +120,21 @@ public final class FileStructureFinderManager { * @return A {@link FileStructureFinder} object from which the structure and messages can be queried. * @throws Exception A variety of problems could occur at various stages of the structure finding process. */ - public FileStructureFinder findFileStructure(Integer idealSampleLineCount, InputStream fromFile, FileStructureOverrides overrides, - TimeValue timeout) - throws Exception { - return findFileStructure(new ArrayList<>(), (idealSampleLineCount == null) ? 
DEFAULT_IDEAL_SAMPLE_LINE_COUNT : idealSampleLineCount, - fromFile, overrides, timeout); - } - - public FileStructureFinder findFileStructure(List explanation, int idealSampleLineCount, InputStream fromFile) - throws Exception { - return findFileStructure(explanation, idealSampleLineCount, fromFile, FileStructureOverrides.EMPTY_OVERRIDES, null); - } - - public FileStructureFinder findFileStructure(List explanation, int idealSampleLineCount, InputStream fromFile, + public FileStructureFinder findFileStructure(Integer idealSampleLineCount, Integer lineMergeSizeLimit, InputStream fromFile, FileStructureOverrides overrides, TimeValue timeout) throws Exception { + return findFileStructure(new ArrayList<>(), (idealSampleLineCount == null) ? DEFAULT_IDEAL_SAMPLE_LINE_COUNT : idealSampleLineCount, + (lineMergeSizeLimit == null) ? DEFAULT_LINE_MERGE_SIZE_LIMIT : lineMergeSizeLimit, fromFile, overrides, timeout); + } + + public FileStructureFinder findFileStructure(List explanation, int idealSampleLineCount, int lineMergeSizeLimit, + InputStream fromFile) throws Exception { + return findFileStructure(explanation, idealSampleLineCount, lineMergeSizeLimit, fromFile, FileStructureOverrides.EMPTY_OVERRIDES, + null); + } + + public FileStructureFinder findFileStructure(List explanation, int idealSampleLineCount, int lineMergeSizeLimit, + InputStream fromFile, FileStructureOverrides overrides, + TimeValue timeout) throws Exception { try (TimeoutChecker timeoutChecker = new TimeoutChecker("structure analysis", timeout, scheduler)) { @@ -148,7 +153,8 @@ public final class FileStructureFinderManager { Tuple sampleInfo = sampleFile(sampleReader, charsetName, MIN_SAMPLE_LINE_COUNT, Math.max(MIN_SAMPLE_LINE_COUNT, idealSampleLineCount), timeoutChecker); - return makeBestStructureFinder(explanation, sampleInfo.v1(), charsetName, sampleInfo.v2(), overrides, timeoutChecker); + return makeBestStructureFinder(explanation, sampleInfo.v1(), charsetName, sampleInfo.v2(), lineMergeSizeLimit, overrides, + timeoutChecker); } catch (Exception e) { // Add a dummy exception containing the explanation so far - this can be invaluable for troubleshooting as incorrect // decisions made early on in the structure analysis can result in seemingly crazy decisions or timeouts later on @@ -263,7 +269,8 @@ public final class FileStructureFinderManager { } FileStructureFinder makeBestStructureFinder(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws Exception { + int lineMergeSizeLimit, FileStructureOverrides overrides, + TimeoutChecker timeoutChecker) throws Exception { Character delimiter = overrides.getDelimiter(); Character quote = overrides.getQuote(); @@ -295,7 +302,8 @@ public final class FileStructureFinderManager { for (FileStructureFinderFactory factory : factories) { timeoutChecker.check("high level format detection"); if (factory.canCreateFromSample(explanation, sample)) { - return factory.createFromSample(explanation, sample, charsetName, hasByteOrderMarker, overrides, timeoutChecker); + return factory.createFromSample(explanation, sample, charsetName, hasByteOrderMarker, lineMergeSizeLimit, overrides, + timeoutChecker); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/NdJsonFileStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/NdJsonFileStructureFinderFactory.java index 43612890bc8..6970af01bb7 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/NdJsonFileStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/NdJsonFileStructureFinderFactory.java @@ -68,7 +68,8 @@ public class NdJsonFileStructureFinderFactory implements FileStructureFinderFact @Override public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws IOException { + int lineMergeSizeLimit, FileStructureOverrides overrides, TimeoutChecker timeoutChecker) + throws IOException { return NdJsonFileStructureFinder.makeNdJsonFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, overrides, timeoutChecker); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java index d07eea15f97..86b1d79b8b6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.xpack.core.ml.action.FindFileStructureAction; import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats; import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; @@ -24,8 +25,8 @@ public class TextLogFileStructureFinder implements FileStructureFinder { private final FileStructure structure; static TextLogFileStructureFinder makeTextLogFileStructureFinder(List explanation, String sample, String charsetName, - Boolean hasByteOrderMarker, FileStructureOverrides overrides, - TimeoutChecker timeoutChecker) { + Boolean hasByteOrderMarker, int lineMergeSizeLimit, + FileStructureOverrides overrides, TimeoutChecker timeoutChecker) { String[] sampleLines = sample.split("\n"); TimestampFormatFinder timestampFormatFinder = populateTimestampFormatFinder(explanation, sampleLines, overrides, timeoutChecker); switch (timestampFormatFinder.getNumMatchedFormats()) { @@ -69,6 +70,16 @@ public class TextLogFileStructureFinder implements FileStructureFinder { // for the CSV header or lines before the first XML document starts) ++linesConsumed; } else { + // This check avoids subsequent problems when a massive message is unwieldy and slow to process + long lengthAfterAppend = message.length() + 1L + sampleLine.length(); + if (lengthAfterAppend > lineMergeSizeLimit) { + assert linesInMessage > 0; + throw new IllegalArgumentException("Merging lines into messages resulted in an unacceptably long message. " + + "Merged message would have [" + (linesInMessage + 1) + "] lines and [" + lengthAfterAppend + "] " + + "characters (limit [" + lineMergeSizeLimit + "]). If you have messages this big please increase " + + "the value of [" + FindFileStructureAction.Request.LINE_MERGE_SIZE_LIMIT + "]. 
Otherwise it " + + "probably means the timestamp has been incorrectly detected, so try overriding that."); + } message.append('\n').append(sampleLine); ++linesInMessage; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java index 5931fea5f1a..2980d5d0678 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java @@ -41,8 +41,8 @@ public class TextLogFileStructureFinderFactory implements FileStructureFinderFac @Override public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides, TimeoutChecker timeoutChecker) { + int lineMergeSizeLimit, FileStructureOverrides overrides, TimeoutChecker timeoutChecker) { return TextLogFileStructureFinder.makeTextLogFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, - overrides, timeoutChecker); + lineMergeSizeLimit, overrides, timeoutChecker); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java index 97984d1d775..382f2e75027 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java @@ -125,7 +125,7 @@ public class XmlFileStructureFinderFactory implements FileStructureFinderFactory @Override public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides, TimeoutChecker timeoutChecker) + int lineMergeSizeLimit, FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws IOException, ParserConfigurationException, SAXException { return XmlFileStructureFinder.makeXmlFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, overrides, timeoutChecker); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java index 5810a2e929d..03c3fb2a39f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java @@ -53,6 +53,8 @@ public class RestFindFileStructureAction extends BaseRestHandler { FindFileStructureAction.Request request = new FindFileStructureAction.Request(); request.setLinesToSample(restRequest.paramAsInt(FindFileStructureAction.Request.LINES_TO_SAMPLE.getPreferredName(), FileStructureFinderManager.DEFAULT_IDEAL_SAMPLE_LINE_COUNT)); + request.setLineMergeSizeLimit(restRequest.paramAsInt(FindFileStructureAction.Request.LINE_MERGE_SIZE_LIMIT.getPreferredName(), + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT)); request.setTimeout(TimeValue.parseTimeValue(restRequest.param(FindFileStructureAction.Request.TIMEOUT.getPreferredName()), DEFAULT_TIMEOUT, 
FindFileStructureAction.Request.TIMEOUT.getPreferredName())); request.setCharset(restRequest.param(FindFileStructureAction.Request.CHARSET.getPreferredName())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java index 280a50324e4..7b157555eef 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java @@ -30,7 +30,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -64,8 +64,8 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides, - NOOP_TIMEOUT_CHECKER); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -101,8 +101,8 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides, - NOOP_TIMEOUT_CHECKER); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -135,7 +135,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -170,7 +170,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); + 
FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -214,8 +214,8 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides, - NOOP_TIMEOUT_CHECKER); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -255,7 +255,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -301,8 +301,8 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides, - NOOP_TIMEOUT_CHECKER); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -340,7 +340,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java index f68d8edc612..188bc9a628b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java @@ -102,7 +102,8 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { public void testMakeBestStructureGivenNdJson() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, NDJSON_SAMPLE, StandardCharsets.UTF_8.name(), - randomBoolean(), EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), instanceOf(NdJsonFileStructureFinder.class)); + randomBoolean(), 
FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), + instanceOf(NdJsonFileStructureFinder.class)); } public void testMakeBestStructureGivenNdJsonAndDelimitedOverride() throws Exception { @@ -113,12 +114,14 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { .setFormat(FileStructure.Format.DELIMITED).setQuote('\'').build(); assertThat(structureFinderManager.makeBestStructureFinder(explanation, NDJSON_SAMPLE, StandardCharsets.UTF_8.name(), - randomBoolean(), overrides, NOOP_TIMEOUT_CHECKER), instanceOf(DelimitedFileStructureFinder.class)); + randomBoolean(), FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER), + instanceOf(DelimitedFileStructureFinder.class)); } public void testMakeBestStructureGivenXml() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, XML_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), instanceOf(XmlFileStructureFinder.class)); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), + instanceOf(XmlFileStructureFinder.class)); } public void testMakeBestStructureGivenXmlAndTextOverride() throws Exception { @@ -126,12 +129,14 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { FileStructureOverrides overrides = FileStructureOverrides.builder().setFormat(FileStructure.Format.SEMI_STRUCTURED_TEXT).build(); assertThat(structureFinderManager.makeBestStructureFinder(explanation, XML_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - overrides, NOOP_TIMEOUT_CHECKER), instanceOf(TextLogFileStructureFinder.class)); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER), + instanceOf(TextLogFileStructureFinder.class)); } public void testMakeBestStructureGivenCsv() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, CSV_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), instanceOf(DelimitedFileStructureFinder.class)); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), + instanceOf(DelimitedFileStructureFinder.class)); } public void testMakeBestStructureGivenCsvAndJsonOverride() { @@ -140,14 +145,15 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> structureFinderManager.makeBestStructureFinder(explanation, CSV_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - overrides, NOOP_TIMEOUT_CHECKER)); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER)); assertEquals("Input did not match the specified format [ndjson]", e.getMessage()); } public void testMakeBestStructureGivenText() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, TEXT_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), instanceOf(TextLogFileStructureFinder.class)); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), + instanceOf(TextLogFileStructureFinder.class)); } public void testMakeBestStructureGivenTextAndDelimitedOverride() throws Exception { @@ -157,7 +163,8 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { 
.setFormat(FileStructure.Format.DELIMITED).setDelimiter(':').build(); assertThat(structureFinderManager.makeBestStructureFinder(explanation, TEXT_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - overrides, NOOP_TIMEOUT_CHECKER), instanceOf(DelimitedFileStructureFinder.class)); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER), + instanceOf(DelimitedFileStructureFinder.class)); } public void testFindFileStructureTimeout() throws IOException, InterruptedException { @@ -190,7 +197,8 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { junkProducer.start(); ElasticsearchTimeoutException e = expectThrows(ElasticsearchTimeoutException.class, - () -> structureFinderManager.findFileStructure(explanation, linesOfJunk - 1, bigInput, EMPTY_OVERRIDES, timeout)); + () -> structureFinderManager.findFileStructure(explanation, FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, + linesOfJunk - 1, bigInput, EMPTY_OVERRIDES, timeout)); assertThat(e.getMessage(), startsWith("Aborting structure analysis during [")); assertThat(e.getMessage(), endsWith("] as it has taken longer than the timeout of [" + timeout + "]")); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/NdJsonFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/NdJsonFileStructureFinderTests.java index a220bdf3b06..048d2708e77 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/NdJsonFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/NdJsonFileStructureFinderTests.java @@ -19,7 +19,7 @@ public class NdJsonFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = factory.createFromSample(explanation, NDJSON_SAMPLE, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java index 6ac672f6178..4c921c8a9f9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java @@ -20,13 +20,36 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { private FileStructureFinderFactory factory = new TextLogFileStructureFinderFactory(); + public void testCreateConfigsGivenLowLineMergeSizeLimit() { + + String sample = "2019-05-16 16:56:14 line 1 abcdefghijklmnopqrstuvwxyz\n" + + "2019-05-16 16:56:14 line 2 abcdefghijklmnopqrstuvwxyz\n" + + "continuation line 2.1\n" + + "continuation line 2.2\n" + + "continuation line 2.3\n" + + "continuation line 2.4\n" + + "2019-05-16 16:56:14 line 3 abcdefghijklmnopqrstuvwxyz\n"; + + assertTrue(factory.canCreateFromSample(explanation, sample)); + + String charset = randomFrom(POSSIBLE_CHARSETS); + Boolean hasByteOrderMarker = 
randomHasByteOrderMarker(charset); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> factory.createFromSample(explanation, sample, charset, hasByteOrderMarker, 100, + FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER)); + + assertEquals("Merging lines into messages resulted in an unacceptably long message. Merged message would have [4] lines and " + + "[119] characters (limit [100]). If you have messages this big please increase the value of [line_merge_size_limit]. " + + "Otherwise it probably means the timestamp has been incorrectly detected, so try overriding that.", e.getMessage()); + } + public void testCreateConfigsGivenElasticsearchLog() throws Exception { assertTrue(factory.canCreateFromSample(explanation, TEXT_SAMPLE)); String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -66,8 +89,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = factory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides, - NOOP_TIMEOUT_CHECKER); + FileStructureFinder structureFinder = factory.createFromSample(explanation, sample, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -102,8 +125,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, overrides, - NOOP_TIMEOUT_CHECKER); + FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -139,8 +162,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, overrides, - NOOP_TIMEOUT_CHECKER); + FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -181,7 +204,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, overrides, 
NOOP_TIMEOUT_CHECKER)); + () -> factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER)); assertEquals("Supplied Grok pattern [\\[%{LOGLEVEL:loglevel} *\\]\\[%{HOSTNAME:node}\\]\\[%{TIMESTAMP_ISO8601:timestamp}\\] " + "\\[%{JAVACLASS:class} *\\] %{JAVALOGMESSAGE:message}] does not match sample messages", e.getMessage()); @@ -200,8 +224,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> factory.createFromSample(explanation, sample, charset, hasByteOrderMarker, FileStructureOverrides.EMPTY_OVERRIDES, - NOOP_TIMEOUT_CHECKER)); + () -> factory.createFromSample(explanation, sample, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER)); assertEquals("Failed to create more than one message from the sample lines provided. (The last is discarded in " + "case the sample is incomplete.) If your sample does contain multiple messages the problem is probably that " diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java index b6f93a6e39b..9ad07f61427 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java @@ -19,7 +19,7 @@ public class XmlFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = factory.createFromSample(explanation, XML_SAMPLE, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.find_file_structure.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.find_file_structure.json index 4e5550ae824..2f65a5d9749 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.find_file_structure.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.find_file_structure.json @@ -11,6 +11,11 @@ "description": "How many lines of the file should be included in the analysis", "default": 1000 }, + "line_merge_size_limit": { + "type": "int", + "description": "Maximum number of characters permitted in a single message when lines are merged to create messages.", + "default": 10000 + }, "timeout": { "type": "time", "description": "Timeout after which the analysis will be aborted", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml index 7c6aff66e3d..a9634605aaa 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml @@ -10,6 
+10,7 @@ setup: Content-Type: "application/json" ml.find_file_structure: lines_to_sample: 3 + line_merge_size_limit: 1234 timeout: 10s body: - airline: AAL From 6c50246a58976f4f871413d2c83080fb30391267 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Mon, 3 Jun 2019 17:50:49 +0200 Subject: [PATCH 039/210] SQL: [Docs] Fix links syntax (#42806) Fix a couple of wrong links because of the order of the anchor and the usage of backquotes. (cherry picked from commit 4e0c6525153b60a57202937c2ae57968c8e35285) --- docs/reference/sql/language/syntax/commands/select.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/sql/language/syntax/commands/select.asciidoc b/docs/reference/sql/language/syntax/commands/select.asciidoc index 0a4922a3cff..1ae8e219acf 100644 --- a/docs/reference/sql/language/syntax/commands/select.asciidoc +++ b/docs/reference/sql/language/syntax/commands/select.asciidoc @@ -205,7 +205,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[groupByAndMultipleAggs] ---- [TIP] -If custom bucketing is required, it can be achieved with the use of `<>`, +If custom bucketing is required, it can be achieved with the use of <>, as shown <>. [[sql-syntax-group-by-implicit]] @@ -337,7 +337,7 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[orderByAgg] ---- IMPORTANT: Ordering by aggregation is possible for up to 512 entries for memory consumption reasons. -In cases where the results pass this threshold, use <<`LIMIT`, sql-syntax-limit>> to reduce the number +In cases where the results pass this threshold, use <> to reduce the number of results. [[sql-syntax-order-by-score]] From 34fd9ce067272fc4668019d6def50acb7134aad4 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 3 Jun 2019 10:25:52 -0700 Subject: [PATCH 040/210] Fix error with test conventions on tasks that require Docker (#42719) --- .../src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 894496c8329..e90273ccdbd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -260,7 +260,7 @@ class BuildPlugin implements Plugin { if (ext.get('buildDocker')) { (ext.get('requiresDocker') as List).add(task) } else { - task.enabled = false + task.onlyIf { false } } } From 00db9c1a2f1fc9718be7a613522b7317bc55c81e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 3 Jun 2019 19:29:36 +0200 Subject: [PATCH 041/210] Make Connection Future Err. 
Handling more Resilient (#42781) (#42804) * There were a number of possible (runtime-) exceptions that could be raised in the adjusted code and prevent resolving the listener * Relates #42350 --- .../elasticsearch/transport/TcpTransport.java | 54 +++++++++---------- 1 file changed, 24 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index eef9f4f4263..7be8872ab5f 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -933,34 +933,20 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements if (countDown.countDown()) { final TcpChannel handshakeChannel = channels.get(0); try { - executeHandshake(node, handshakeChannel, connectionProfile, new ActionListener() { - @Override - public void onResponse(Version version) { - NodeChannels nodeChannels = new NodeChannels(node, channels, connectionProfile, version); - long relativeMillisTime = threadPool.relativeTimeInMillis(); - nodeChannels.channels.forEach(ch -> { - // Mark the channel init time - ch.getChannelStats().markAccessed(relativeMillisTime); - ch.addCloseListener(ActionListener.wrap(nodeChannels::close)); - }); - keepAlive.registerNodeConnection(nodeChannels.channels, connectionProfile); - listener.onResponse(nodeChannels); - } - - @Override - public void onFailure(Exception e) { - CloseableChannel.closeChannels(channels, false); - - if (e instanceof ConnectTransportException) { - listener.onFailure(e); - } else { - listener.onFailure(new ConnectTransportException(node, "general node connection failure", e)); - } - } - }); + executeHandshake(node, handshakeChannel, connectionProfile, ActionListener.wrap(version -> { + NodeChannels nodeChannels = new NodeChannels(node, channels, connectionProfile, version); + long relativeMillisTime = threadPool.relativeTimeInMillis(); + nodeChannels.channels.forEach(ch -> { + // Mark the channel init time + ch.getChannelStats().markAccessed(relativeMillisTime); + ch.addCloseListener(ActionListener.wrap(nodeChannels::close)); + }); + keepAlive.registerNodeConnection(nodeChannels.channels, connectionProfile); + listener.onResponse(nodeChannels); + }, e -> closeAndFail(e instanceof ConnectTransportException ? 
+ e : new ConnectTransportException(node, "general node connection failure", e)))); } catch (Exception ex) { - CloseableChannel.closeChannels(channels, false); - listener.onFailure(ex); + closeAndFail(ex); } } } @@ -968,15 +954,23 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements @Override public void onFailure(Exception ex) { if (countDown.fastForward()) { - CloseableChannel.closeChannels(channels, false); - listener.onFailure(new ConnectTransportException(node, "connect_exception", ex)); + closeAndFail(new ConnectTransportException(node, "connect_exception", ex)); } } public void onTimeout() { if (countDown.fastForward()) { + closeAndFail(new ConnectTransportException(node, "connect_timeout[" + connectionProfile.getConnectTimeout() + "]")); + } + } + + private void closeAndFail(Exception e) { + try { CloseableChannel.closeChannels(channels, false); - listener.onFailure(new ConnectTransportException(node, "connect_timeout[" + connectionProfile.getConnectTimeout() + "]")); + } catch (Exception ex) { + e.addSuppressed(ex); + } finally { + listener.onFailure(e); } } } From 428beabc49739744cbad9ec09d5be2e6f0d00a00 Mon Sep 17 00:00:00 2001 From: Yu Date: Tue, 4 Jun 2019 00:37:59 +0800 Subject: [PATCH 042/210] Remove "template" field in IndexTemplateMetaData (#42099) Remove "template" field from XContent parsing in IndexTemplateMetaData --- .../cluster/metadata/IndexTemplateMetaData.java | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index f61b2fc208f..88bf3a629af 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; @@ -33,7 +32,6 @@ import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; @@ -53,8 +51,6 @@ import java.util.Set; public class IndexTemplateMetaData extends AbstractDiffable { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(IndexTemplateMetaData.class)); - private final String name; private final int order; @@ -98,7 +94,7 @@ public class IndexTemplateMetaData extends AbstractDiffable VALID_FIELDS = Sets.newHashSet( - "template", "order", "mappings", "settings", "index_patterns", "aliases", "version"); + "order", "mappings", "settings", "index_patterns", "aliases", "version"); private String name; @@ -507,11 +503,7 @@ public class IndexTemplateMetaData extends AbstractDiffable Date: Mon, 3 Jun 2019 22:26:01 +0200 Subject: [PATCH 043/210] [Docs] Add note for date patterns used for index search. 
(#42810) Add an explanatory NOTE section to draw attention to the difference between small and capital letters used for the index date patterns. e.g.: HH vs hh, MM vs mm. Closes: #22322 (cherry picked from commit c8125417dc33215651f9bb76c9b1ffaf25f41caf) --- docs/reference/api-conventions.asciidoc | 4 ++++ docs/reference/ingest/processors/date-index-name.asciidoc | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index edace173495..9e0578ee528 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -86,6 +86,10 @@ Where: `date_format`:: is the optional format in which the computed date should be rendered. Defaults to `yyyy.MM.dd`. Format should be compatible with java-time https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html `time_zone`:: is the optional time zone. Defaults to `utc`. +NOTE: Pay attention to the usage of small vs capital letters used in the `date_format`. For example: +`mm` denotes minute of hour, while `MM` denotes month of year. Similarly `hh` denotes the hour in the +`1-12` range in combination with `AM/PM`, while `HH` denotes the hour in the `0-23` 24-hour range. + Date math expressions are resolved locale-independent. Consequently, it is not possible to use any other calendars than the Gregorian calendar. diff --git a/docs/reference/ingest/processors/date-index-name.asciidoc b/docs/reference/ingest/processors/date-index-name.asciidoc index fa749fd3420..783ecc9b2b1 100644 --- a/docs/reference/ingest/processors/date-index-name.asciidoc +++ b/docs/reference/ingest/processors/date-index-name.asciidoc @@ -140,6 +140,6 @@ understands this to mean `2016-04-01` as is explained in the <>. +| `index_name_format` | no | yyyy-MM-dd | The format to be used when printing the parsed date into the index name. A valid java time pattern is expected here. Supports <>. include::common-options.asciidoc[] |====== From de72fe344c21f97774ff8861213cf74a1f2e7bf3 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Mon, 3 Jun 2019 13:22:45 -0700 Subject: [PATCH 044/210] Add Basic Date Docs to Painless (#42544) --- docs/painless/painless-guide/index.asciidoc | 2 + .../painless-guide/painless-datetime.asciidoc | 320 ++++++++++++++++++ .../org/elasticsearch/painless/DateTests.java | 180 ++++++++++ 3 files changed, 502 insertions(+) create mode 100644 docs/painless/painless-guide/painless-datetime.asciidoc create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/DateTests.java diff --git a/docs/painless/painless-guide/index.asciidoc b/docs/painless/painless-guide/index.asciidoc index b45406a4e72..2243608ffb1 100644 --- a/docs/painless/painless-guide/index.asciidoc +++ b/docs/painless/painless-guide/index.asciidoc @@ -1,5 +1,7 @@ include::painless-walkthrough.asciidoc[] +include::painless-datetime.asciidoc[] + include::painless-method-dispatch.asciidoc[] include::painless-debugging.asciidoc[] diff --git a/docs/painless/painless-guide/painless-datetime.asciidoc b/docs/painless/painless-guide/painless-datetime.asciidoc new file mode 100644 index 00000000000..ef8ef8fd8c8 --- /dev/null +++ b/docs/painless/painless-guide/painless-datetime.asciidoc @@ -0,0 +1,320 @@ +[[painless-datetime]] +=== Using Datetime in Painless + +==== Datetime API + +Datetimes in Painless use the standard Java libraries and are available through +the Painless <>. 
Most of the classes +from the following Java packages are available to use in Painless scripts: + +* <> +* <> +* <> +* <> +* <> + +==== Datetime Representation + +Datetimes in Painless are most commonly represented as a +<>, a <>, or a +<>. + +long:: represents a datetime as the number of milliseconds or nanoseconds since +epoch (1970-01-01T00:00:00Z) +String:: represents a datetime as a sequence of characters defined by a +well-known standard such as https://en.wikipedia.org/wiki/ISO_8601[ISO 8601] or +defined by the source of input in a custom way +ZonedDateTime:: a <> (object) that contains an +internal representation of a datetime and provides numerous +<> for +modification and comparison. + +Switching between different representations of datetimes is often necessary to +achieve a script's objective(s). A typical pattern in a script is to switch a +long or String representation of a datetime to a ZonedDateTime representation, +modify or compare the ZonedDateTime representation, and then switch it back to +a long or String representation for storage or as a returned result. + +==== Datetime Parsing and Formatting + +Datetime parsing is a switch from a String representation to a ZonedDateTime +representation, and datetime formatting is a switch from a ZonedDateTime +representation to a String representation. + +A <> is a +<> (object) that defines the allowed sequence +of characters for a String representation of a datetime. Datetime parsing and +formatting often requires a DateTimeFormatter. For more information about how +to use a DateTimeFormatter see the +{java11-javadoc}/java.base/java/time/format/DateTimeFormatter.html[Java documentation]. + +===== Datetime Parsing Examples + +* parse from milliseconds ++ +[source,Painless] +---- +String milliSinceEpochString = "434931330000"; +long milliSinceEpoch = Long.parseLong(milliSinceEpochString); +Instant instant = Instant.ofEpochMilli(milliSinceEpoch); +ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of('Z')); +---- ++ +* parse from ISO 8601 ++ +[source,Painless] +---- +String datetime = '1983-10-13T22:15:30Z'; +ZonedDateTime zdt = ZonedDateTime.parse(datetime); +---- +Note the parse method uses ISO 8601 by default. ++ +* parse from RFC 1123 ++ +[source,Painless] +---- +String datetime = 'Thu, 13 Oct 1983 22:15:30 GMT'; +ZonedDateTime zdt = ZonedDateTime.parse(datetime, + DateTimeFormatter.RFC_1123_DATE_TIME); +---- +Note the use of a built-in DateTimeFormatter. ++ +* parse from a custom format ++ +[source,Painless] +---- +String datetime = 'custom y 1983 m 10 d 13 22:15:30 Z'; +DateTimeFormatter dtf = DateTimeFormatter.ofPattern( + "'custom' 'y' yyyy 'm' MM 'd' dd HH:mm:ss VV"); +ZonedDateTime zdt = ZonedDateTime.parse(datetime, dtf); +---- +Note the use of a custom DateTimeFormatter. + +===== Datetime Formatting Examples + +* format to a String (ISO 8601) ++ +[source,Painless] +---- +ZonedDateTime zdt = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); +String datetime = zdt.format(DateTimeFormatter.ISO_INSTANT); +---- +Note the use of a built-in DateTimeFormatter. ++ +* format to a String (custom) ++ +[source,Painless] +---- +ZonedDateTime zdt = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); +DateTimeFormatter dtf = DateTimeFormatter.ofPattern( + "'date:' yyyy/MM/dd 'time:' HH:mm:ss"); +String datetime = zdt.format(dtf); +---- +Note the use of a custom DateTimeFormatter. 
+ +==== Datetime Conversion + +Datetime conversion is a switch from a long representation to a ZonedDateTime +representation and vice versa. + +===== Datetime Conversion Examples + +* convert from milliseconds ++ +[source,Painless] +---- +long milliSinceEpoch = 434931330000L; +Instant instant = Instant.ofEpochMilli(milliSinceEpoch); +ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of('Z')); +---- ++ +* convert to milliseconds ++ +[source,Painless] +----- +ZonedDateTime zdt = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); +long milliSinceEpoch = zdt.toInstant().toEpochMilli(); +----- + +==== Datetime Pieces + +Use the ZonedDateTime +<> to create a new +ZonedDateTime from pieces (year, month, day, hour, minute, second, nano, +time zone). Use ZonedDateTime +<> to extract pieces from +a ZonedDateTime. + +===== Datetime Pieces Examples + +* create a ZonedDateTime from pieces ++ +[source,Painless] +---- +int year = 1983; +int month = 10; +int day = 13; +int hour = 22; +int minutes = 15; +int seconds = 30; +int nanos = 0; +ZonedDateTime zdt = ZonedDateTime.of( + year, month, day, hour, minutes, seconds, nanos, ZoneId.of('Z')); +---- ++ +* extract pieces from a ZonedDateTime ++ +[source,Painless] +---- +ZonedDateTime zdt = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 100, ZoneId.of(tz)); +int year = zdt.getYear(); +int month = zdt.getMonthValue(); +int day = zdt.getDayOfMonth(); +int hour = zdt.getHour(); +int minutes = zdt.getMinute(); +int seconds = zdt.getSecond(); +int nanos = zdt.getNano(); +---- + +==== Datetime Modification + +Use either a long or a ZonedDateTime to do datetime modification such as adding +several seconds to a datetime or subtracting several days from a datetime. Use +standard <> to modify a long +representation of a datetime. Use ZonedDateTime +<> to modify a +ZonedDateTime representation of a datetime. Note most modification methods for +a ZonedDateTime return a new instance for assignment or immediate use. + +===== Datetime Modification Examples + +* Subtract three seconds from milliseconds ++ +[source,Painless] +---- +long milliSinceEpoch = 434931330000L; +milliSinceEpoch = milliSinceEpoch - 1000L*3L; +---- ++ +* Add three days to a datetime ++ +[source,Painless] +---- +ZonedDateTime zdt = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); +ZonedDateTime updatedZdt = zdt.plusDays(3); +---- ++ +* Subtract 125 minutes from a datetime ++ +[source,Painless] +---- +ZonedDateTime zdt = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); +ZonedDateTime updatedZdt = zdt.minusMinutes(125); +---- ++ +* Set the year on a datetime ++ +[source,Painless] +---- +ZonedDateTime zdt = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); +ZonedDateTime updatedZdt = zdt.withYear(1976); +---- + +==== Elapsed Time + +Use either two longs or two ZonedDateTimes to calculate an elapsed +time (difference) between two datetimes. Use +<> to calculate an elapsed time +between two longs of the same time unit such as milliseconds. For more complex +datetimes. use <> to +calculate the difference between two ZonedDateTimes. 
+ +===== Elapsed Time Examples + +* Elapsed time for two millisecond datetimes ++ +[source,Painless] +---- +long startTimestamp = 434931327000L; +long endTimestamp = 434931330000L; +long differenceInMillis = endTimestamp - startTimestamp; +---- ++ +* Elapsed time in milliseconds for two datetimes ++ +[source,Painless] +---- +ZonedDateTime zdt1 = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z')); +ZonedDateTime zdt2 = + ZonedDateTime.of(1983, 10, 13, 22, 15, 35, 0, ZoneId.of('Z')); +long differenceInMillis = ChronoUnit.MILLIS.between(zdt1, zdt2); +---- ++ +* Elapsed time in days for two datetimes ++ +[source,Painless] +---- +ZonedDateTime zdt1 = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z')); +ZonedDateTime zdt2 = + ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z')); +long differenceInDays = ChronoUnit.DAYS.between(zdt1, zdt2); +---- + +==== Datetime Comparison + +Use either two longs or two ZonedDateTimes to do a datetime comparison. Use +standard <> to compare two +longs of the same time unit such as milliseconds. For more complex datetimes, +use ZonedDateTime <> to +compare two ZonedDateTimes. + +===== Datetime Comparison Examples + +* Comparison of two millisecond datetimes ++ +[source,Painless] +---- +long timestamp1 = 434931327000L; +long timestamp2 = 434931330000L; + +if (timestamp1 > timestamp2) { + // handle condition +} +---- ++ +* Before comparision of two datetimes ++ +[source,Painless] +---- +ZonedDateTime zdt1 = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); +ZonedDateTime zdt2 = + ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z')); + +if (zdt1.isBefore(zdt2)) { + // handle condition +} +---- ++ +* After comparision of two datetimes ++ +[source,Painless] +---- +ZonedDateTime zdt1 = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); +ZonedDateTime zdt2 = + ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z')); + +if (zdt1.isAfter(zdt2)) { + // handle condition +} +---- diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DateTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DateTests.java new file mode 100644 index 00000000000..58357cce3ac --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DateTests.java @@ -0,0 +1,180 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import java.time.ZoneId; +import java.time.ZonedDateTime; + +public class DateTests extends ScriptTestCase { + + public void testLongToZonedDateTime() { + assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( + "long milliSinceEpoch = 434931330000L;" + + "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);" + + "return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));" + )); + } + + public void testStringToZonedDateTime() { + assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( + "String milliSinceEpochString = '434931330000';" + + "long milliSinceEpoch = Long.parseLong(milliSinceEpochString);" + + "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);" + + "return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));" + )); + + assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( + "String datetime = '1983-10-13T22:15:30Z';" + + "return ZonedDateTime.parse(datetime);" + )); + + assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( + "String datetime = 'Thu, 13 Oct 1983 22:15:30 GMT';" + + "return ZonedDateTime.parse(datetime, DateTimeFormatter.RFC_1123_DATE_TIME);" + )); + + assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( + "String datetime = 'custom y 1983 m 10 d 13 22:15:30 Z';" + + "DateTimeFormatter dtf = DateTimeFormatter.ofPattern(" + + "\"'custom' 'y' yyyy 'm' MM 'd' dd HH:mm:ss VV\");" + + "return ZonedDateTime.parse(datetime, dtf);" + )); + } + + public void testPiecesToZonedDateTime() { + assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( + "int year = 1983;" + + "int month = 10;" + + "int day = 13;" + + "int hour = 22;" + + "int minutes = 15;" + + "int seconds = 30;" + + "int nanos = 0;" + + "String tz = 'Z';" + + "return ZonedDateTime.of(year, month, day, hour, minutes, seconds, nanos, ZoneId.of(tz));" + )); + } + + public void testZonedDatetimeToLong() { + assertEquals(434931330000L, exec( + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "return zdt.toInstant().toEpochMilli();" + )); + } + + public void testZonedDateTimeToString() { + assertEquals("1983-10-13T22:15:30Z", exec( + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "return zdt.format(DateTimeFormatter.ISO_INSTANT);" + )); + + assertEquals("date: 1983/10/13 time: 22:15:30", exec( + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "DateTimeFormatter dtf = DateTimeFormatter.ofPattern(" + + "\"'date:' yyyy/MM/dd 'time:' HH:mm:ss\");" + + "return zdt.format(dtf);" + )); + } + + public void testZonedDateTimeToPieces() { + assertArrayEquals(new int[] {1983, 10, 13, 22, 15, 30, 100}, (int[])exec( + "int[] pieces = new int[7];" + + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 100, ZoneId.of('Z'));" + + "pieces[0] = zdt.year;" + + "pieces[1] = zdt.monthValue;" + + "pieces[2] = zdt.dayOfMonth;" + + "pieces[3] = zdt.hour;" + + "pieces[4] = zdt.minute;" + + "pieces[5] = zdt.second;" + + "pieces[6] = zdt.nano;" + + "return pieces;" + )); + } + + public void testLongManipulation() { + assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 27, 0, ZoneId.of("Z")), exec( + "long milliSinceEpoch = 434931330000L;" + + "milliSinceEpoch = milliSinceEpoch - 1000L*3L;" + + "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);" + + "return ZonedDateTime.ofInstant(instant, 
ZoneId.of('Z'))" + )); + } + + public void testZonedDateTimeManipulation() { + assertEquals(ZonedDateTime.of(1983, 10, 16, 22, 15, 30, 0, ZoneId.of("Z")), exec( + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "return zdt.plusDays(3);" + )); + + assertEquals(ZonedDateTime.of(1983, 10, 13, 20, 10, 30, 0, ZoneId.of("Z")), exec( + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "return zdt.minusMinutes(125);" + )); + + assertEquals(ZonedDateTime.of(1976, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "return zdt.withYear(1976);" + )); + } + + public void testLongTimeDifference() { + assertEquals(3000L, exec( + "long startTimestamp = 434931327000L;" + + "long endTimestamp = 434931330000L;" + + "return endTimestamp - startTimestamp;" + )); + } + + public void testZonedDateTimeDifference() { + assertEquals(4989L, exec( + "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));" + + "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 13, 22, 15, 35, 0, ZoneId.of('Z'));" + + "return ChronoUnit.MILLIS.between(zdt1, zdt2);" + )); + + assertEquals(4L, exec( + "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));" + + "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));" + + "return ChronoUnit.DAYS.between(zdt1, zdt2);" + )); + } + + public void compareLongs() { + assertEquals(false, exec( + "long ts1 = 434931327000L;" + + "long ts2 = 434931330000L;" + + "return ts1 > ts2;" + )); + } + + public void compareZonedDateTimes() { + assertEquals(true, exec( + "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));" + + "return zdt1.isBefore(zdt2);" + )); + + assertEquals(false, exec( + "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));" + + "return zdt1.isAfter(zdt2);" + )); + } +} From eab88354f20f9a93f02ee350ab1dcec0c09fc4e1 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Mon, 3 Jun 2019 22:56:39 +0200 Subject: [PATCH 045/210] [Docs] Fix reference to `boost` and `slop` params (#42803) For `multi_match` query: link `boost` param to the generic reference for query usage and `slop` to the `match_phrase` query where its usage is documented. Fixes: #40091 (cherry picked from commit 69993049a8bd9e7f042935729fe69a8266d95a0a) --- docs/reference/query-dsl/multi-match-query.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/query-dsl/multi-match-query.asciidoc b/docs/reference/query-dsl/multi-match-query.asciidoc index 9f574ed814d..4e32670c3f7 100644 --- a/docs/reference/query-dsl/multi-match-query.asciidoc +++ b/docs/reference/query-dsl/multi-match-query.asciidoc @@ -292,9 +292,9 @@ GET /_search -------------------------------------------------- // CONSOLE -Also, accepts `analyzer`, `boost`, `lenient`, `slop` and `zero_terms_query` as explained -in <>. Type `phrase_prefix` additionally accepts -`max_expansions`. +Also, accepts `analyzer`, <>, `lenient` and `zero_terms_query` as explained +in <>, as well as `slop` which is explained in <>. +Type `phrase_prefix` additionally accepts `max_expansions`. 
[IMPORTANT] [[phrase-fuzziness]] From 87cc6a974c13c65c3136432aa4c6e56ba958ac34 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 3 Jun 2019 19:49:58 -0500 Subject: [PATCH 046/210] [ML] [Data Frame] adding and modifying auditor messages (#42722) (#42818) * [ML] [Data Frame] adding and modifying auditor messages * Update DataFrameTransformTask.java --- .../TransportDeleteDataFrameTransformAction.java | 10 ++++++++-- .../TransportPutDataFrameTransformAction.java | 11 +++++++++-- .../TransportStartDataFrameTransformAction.java | 5 ++--- .../transforms/DataFrameTransformTask.java | 16 +++++++++------- 4 files changed, 28 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java index ac40334dfb4..1e0fcd31fb2 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAction; import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAction.Request; +import org.elasticsearch.xpack.dataframe.notifications.DataFrameAuditor; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; import java.io.IOException; @@ -30,14 +31,16 @@ import java.io.IOException; public class TransportDeleteDataFrameTransformAction extends TransportMasterNodeAction { private final DataFrameTransformsConfigManager transformsConfigManager; + private final DataFrameAuditor auditor; @Inject public TransportDeleteDataFrameTransformAction(TransportService transportService, ActionFilters actionFilters, ThreadPool threadPool, ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver, - DataFrameTransformsConfigManager transformsConfigManager) { + DataFrameTransformsConfigManager transformsConfigManager, DataFrameAuditor auditor) { super(DeleteDataFrameTransformAction.NAME, transportService, clusterService, threadPool, actionFilters, Request::new, indexNameExpressionResolver); this.transformsConfigManager = transformsConfigManager; + this.auditor = auditor; } @Override @@ -65,7 +68,10 @@ public class TransportDeleteDataFrameTransformAction extends TransportMasterNode } else { // Task is not running, delete the configuration document transformsConfigManager.deleteTransform(request.getId(), ActionListener.wrap( - r -> listener.onResponse(new AcknowledgedResponse(r)), + r -> { + auditor.info(request.getId(), "Deleted data frame transform."); + listener.onResponse(new AcknowledgedResponse(r)); + }, listener::onFailure)); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java index 997739b2407..b4d5957c0f5 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java +++ 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java @@ -46,6 +46,7 @@ import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.dataframe.notifications.DataFrameAuditor; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; import org.elasticsearch.xpack.dataframe.transforms.pivot.Pivot; @@ -65,12 +66,14 @@ public class TransportPutDataFrameTransformAction private final Client client; private final DataFrameTransformsConfigManager dataFrameTransformsConfigManager; private final SecurityContext securityContext; + private final DataFrameAuditor auditor; @Inject public TransportPutDataFrameTransformAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, XPackLicenseState licenseState, - DataFrameTransformsConfigManager dataFrameTransformsConfigManager, Client client) { + DataFrameTransformsConfigManager dataFrameTransformsConfigManager, Client client, + DataFrameAuditor auditor) { super(PutDataFrameTransformAction.NAME, transportService, clusterService, threadPool, actionFilters, PutDataFrameTransformAction.Request::new, indexNameExpressionResolver); this.licenseState = licenseState; @@ -78,6 +81,7 @@ public class TransportPutDataFrameTransformAction this.dataFrameTransformsConfigManager = dataFrameTransformsConfigManager; this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ? new SecurityContext(settings, threadPool.getThreadContext()) : null; + this.auditor = auditor; } @Override @@ -234,7 +238,10 @@ public class TransportPutDataFrameTransformAction // <5> Return the listener, or clean up destination index on failure. ActionListener putTransformConfigurationListener = ActionListener.wrap( - putTransformConfigurationResult -> listener.onResponse(new AcknowledgedResponse(true)), + putTransformConfigurationResult -> { + auditor.info(config.getId(), "Created data frame transform."); + listener.onResponse(new AcknowledgedResponse(true)); + }, listener::onFailure ); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java index 8b7bcb8d764..e23e54d67b5 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java @@ -184,11 +184,10 @@ public class TransportStartDataFrameTransformAction extends if(dest.length == 0) { auditor.info(request.getId(), - "Could not find destination index [" + destinationIndex + "]." 
+ - " Creating index with deduced mappings."); + "Creating destination index [" + destinationIndex + "] with deduced mappings."); createDestinationIndex(config, createOrGetIndexListener); } else { - auditor.info(request.getId(), "Destination index [" + destinationIndex + "] already exists."); + auditor.info(request.getId(), "Using existing destination index [" + destinationIndex + "]."); ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), ClientHelper.DATA_FRAME_ORIGIN, client.admin() diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java index 575cd4c15bd..20ef5be09e8 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java @@ -213,7 +213,8 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S logger.info("Updating state for data frame transform [{}] to [{}]", transform.getId(), state.toString()); persistStateToClusterState(state, ActionListener.wrap( task -> { - auditor.info(transform.getId(), "Updated state to [" + state.getTaskState() + "]"); + auditor.info(transform.getId(), + "Updated data frame transform state to [" + state.getTaskState() + "]."); long now = System.currentTimeMillis(); // kick off the indexer triggered(new Event(schedulerJobName(), now, now)); @@ -293,10 +294,9 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S synchronized void markAsFailed(String reason, ActionListener listener) { taskState.set(DataFrameTransformTaskState.FAILED); stateReason.set(reason); + auditor.error(transform.getId(), reason); persistStateToClusterState(getState(), ActionListener.wrap( - r -> { - listener.onResponse(null); - }, + r -> listener.onResponse(null), listener::onFailure )); } @@ -560,6 +560,8 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S }, statsExc -> { logger.error("Updating stats of transform [" + transformConfig.getId() + "] failed", statsExc); + auditor.warning(getJobId(), + "Failure updating stats of transform: " + statsExc.getMessage()); next.run(); } )); @@ -588,7 +590,7 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S try { super.onFinish(listener); long checkpoint = transformTask.currentCheckpoint.incrementAndGet(); - auditor.info(transformTask.getTransformId(), "Finished indexing for data frame transform checkpoint [" + checkpoint + "]"); + auditor.info(transformTask.getTransformId(), "Finished indexing for data frame transform checkpoint [" + checkpoint + "]."); logger.info( "Finished indexing for data frame transform [" + transformTask.getTransformId() + "] checkpoint [" + checkpoint + "]"); listener.onResponse(null); @@ -599,14 +601,14 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S @Override protected void onStop() { - auditor.info(transformConfig.getId(), "Indexer has stopped"); + auditor.info(transformConfig.getId(), "Data frame transform has stopped."); logger.info("Data frame transform [{}] indexer has stopped", transformConfig.getId()); transformTask.shutdown(); } @Override protected void onAbort() { - auditor.info(transformConfig.getId(), "Received abort request, stopping indexer"); + 
auditor.info(transformConfig.getId(), "Received abort request, stopping data frame transform."); logger.info("Data frame transform [" + transformConfig.getId() + "] received abort request, stopping indexer"); transformTask.shutdown(); } From 9035e61825fdf446c7a2d89d08cdcae498e75aa3 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 4 Jun 2019 14:25:20 +1000 Subject: [PATCH 047/210] Detect when security index is closed (#42740) If the security index is closed, it should be treated as unavailable for security purposes. Prior to 8.0 (or in a mixed cluster) a closed security index has no routing data, which would cause a NPE in the cluster change handler, and the index state would not be updated correctly. This commit fixes that problem Backport of: #42191 --- .../support/SecurityIndexManager.java | 77 ++++++++++++------- .../authc/AuthenticationServiceTests.java | 6 +- .../authc/esnative/NativeRealmTests.java | 6 +- .../mapper/NativeRoleMappingStoreTests.java | 18 +++-- .../authz/store/CompositeRolesStoreTests.java | 17 ++-- .../support/SecurityIndexManagerTests.java | 56 +++++++++++--- 6 files changed, 119 insertions(+), 61 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index a96693c4556..a3977350962 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -45,8 +45,10 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames; import org.elasticsearch.xpack.core.template.TemplateUtils; @@ -173,9 +175,11 @@ public class SecurityIndexManager implements ClusterStateListener { throw new IllegalStateException("caller must make sure to use a frozen state and check indexAvailable"); } - if (localState.indexExists()) { + if (localState.indexState == IndexMetaData.State.CLOSE) { + return new IndexClosedException(new Index(localState.concreteIndexName, ClusterState.UNKNOWN_UUID)); + } else if (localState.indexExists()) { return new UnavailableShardsException(null, - "at least one primary shard for the index [" + localState.concreteIndexName + "] is unavailable"); + "at least one primary shard for the index [" + localState.concreteIndexName + "] is unavailable"); } else { return new IndexNotFoundException(localState.concreteIndexName); } @@ -206,11 +210,24 @@ public class SecurityIndexManager implements ClusterStateListener { final boolean indexAvailable = checkIndexAvailable(event.state()); final boolean mappingIsUpToDate = indexMetaData == null || checkIndexMappingUpToDate(event.state()); final Version mappingVersion = oldestIndexMappingVersion(event.state()); - final ClusterHealthStatus indexStatus = indexMetaData == null ? 
null : - new ClusterIndexHealth(indexMetaData, event.state().getRoutingTable().index(indexMetaData.getIndex())).getStatus(); final String concreteIndexName = indexMetaData == null ? internalIndexName : indexMetaData.getIndex().getName(); + final ClusterHealthStatus indexHealth; + final IndexMetaData.State indexState; + if (indexMetaData == null) { + // Index does not exist + indexState = null; + indexHealth = null; + } else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) { + indexState = IndexMetaData.State.CLOSE; + indexHealth = null; + logger.warn("Index [{}] is closed. This is likely to prevent security from functioning correctly", concreteIndexName); + } else { + indexState = IndexMetaData.State.OPEN; + final IndexRoutingTable routingTable = event.state().getRoutingTable().index(indexMetaData.getIndex()); + indexHealth = new ClusterIndexHealth(indexMetaData, routingTable).getStatus(); + } final State newState = new State(creationTime, isIndexUpToDate, indexAvailable, mappingIsUpToDate, mappingVersion, - concreteIndexName, indexStatus); + concreteIndexName, indexHealth, indexState); this.indexState = newState; if (newState.equals(previousState) == false) { @@ -221,23 +238,21 @@ public class SecurityIndexManager implements ClusterStateListener { } private boolean checkIndexAvailable(ClusterState state) { - final IndexRoutingTable routingTable = getIndexRoutingTable(state); - if (routingTable != null && routingTable.allPrimaryShardsActive()) { - return true; - } - logger.debug("Index [{}] is not yet active", aliasName); - return false; - } - - /** - * Returns the routing-table for this index, or null if the index does not exist. - */ - private IndexRoutingTable getIndexRoutingTable(ClusterState clusterState) { - IndexMetaData metaData = resolveConcreteIndex(aliasName, clusterState.metaData()); + IndexMetaData metaData = resolveConcreteIndex(aliasName, state.metaData()); if (metaData == null) { - return null; + logger.debug("Index [{}] is not available - no metadata", aliasName); + return false; + } + if (metaData.getState() == IndexMetaData.State.CLOSE) { + logger.warn("Index [{}] is closed", aliasName); + return false; + } + final IndexRoutingTable routingTable = state.routingTable().index(metaData.getIndex()); + if (routingTable == null || routingTable.allPrimaryShardsActive() == false) { + logger.debug("Index [{}] is not yet active", aliasName); + return false; } else { - return clusterState.routingTable().index(metaData.getIndex()); + return true; } } @@ -402,15 +417,15 @@ public class SecurityIndexManager implements ClusterStateListener { * Return true if the state moves from an unhealthy ("RED") index state to a healthy ("non-RED") state. */ public static boolean isMoveFromRedToNonRed(State previousState, State currentState) { - return (previousState.indexStatus == null || previousState.indexStatus == ClusterHealthStatus.RED) - && currentState.indexStatus != null && currentState.indexStatus != ClusterHealthStatus.RED; + return (previousState.indexHealth == null || previousState.indexHealth == ClusterHealthStatus.RED) + && currentState.indexHealth != null && currentState.indexHealth != ClusterHealthStatus.RED; } /** * Return true if the state moves from the index existing to the index not existing. 
*/ public static boolean isIndexDeleted(State previousState, State currentState) { - return previousState.indexStatus != null && currentState.indexStatus == null; + return previousState.indexHealth != null && currentState.indexHealth == null; } private static byte[] readTemplateAsBytes(String templateName) { @@ -440,24 +455,27 @@ public class SecurityIndexManager implements ClusterStateListener { * State of the security index. */ public static class State { - public static final State UNRECOVERED_STATE = new State(null, false, false, false, null, null, null); + public static final State UNRECOVERED_STATE = new State(null, false, false, false, null, null, null, null); public final Instant creationTime; public final boolean isIndexUpToDate; public final boolean indexAvailable; public final boolean mappingUpToDate; public final Version mappingVersion; public final String concreteIndexName; - public final ClusterHealthStatus indexStatus; + public final ClusterHealthStatus indexHealth; + public final IndexMetaData.State indexState; public State(Instant creationTime, boolean isIndexUpToDate, boolean indexAvailable, - boolean mappingUpToDate, Version mappingVersion, String concreteIndexName, ClusterHealthStatus indexStatus) { + boolean mappingUpToDate, Version mappingVersion, String concreteIndexName, ClusterHealthStatus indexHealth, + IndexMetaData.State indexState) { this.creationTime = creationTime; this.isIndexUpToDate = isIndexUpToDate; this.indexAvailable = indexAvailable; this.mappingUpToDate = mappingUpToDate; this.mappingVersion = mappingVersion; this.concreteIndexName = concreteIndexName; - this.indexStatus = indexStatus; + this.indexHealth = indexHealth; + this.indexState = indexState; } @Override @@ -471,7 +489,8 @@ public class SecurityIndexManager implements ClusterStateListener { mappingUpToDate == state.mappingUpToDate && Objects.equals(mappingVersion, state.mappingVersion) && Objects.equals(concreteIndexName, state.concreteIndexName) && - indexStatus == state.indexStatus; + indexHealth == state.indexHealth && + indexState == state.indexState; } public boolean indexExists() { @@ -481,7 +500,7 @@ public class SecurityIndexManager implements ClusterStateListener { @Override public int hashCode() { return Objects.hash(creationTime, isIndexUpToDate, indexAvailable, mappingUpToDate, mappingVersion, concreteIndexName, - indexStatus); + indexHealth); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index d8a7d944794..cb130325089 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.UUIDs; @@ -364,7 +365,7 @@ public class AuthenticationServiceTests extends ESTestCase { // green to yellow or yellow to green previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - 
currentState = dummyState(previousState.indexStatus == ClusterHealthStatus.GREEN ? + currentState = dummyState(previousState.indexHealth == ClusterHealthStatus.GREEN ? ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); service.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, service.getNumInvalidation()); @@ -1402,6 +1403,7 @@ public class AuthenticationServiceTests extends ESTestCase { } private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { - return new SecurityIndexManager.State(Instant.now(), true, true, true, null, concreteSecurityIndexName, indexStatus); + return new SecurityIndexManager.State( + Instant.now(), true, true, true, null, concreteSecurityIndexName, indexStatus, IndexMetaData.State.OPEN); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java index 28625f20627..a8682c4e21d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.authc.esnative; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.TestEnvironment; @@ -27,7 +28,8 @@ public class NativeRealmTests extends ESTestCase { RestrictedIndicesNames.INTERNAL_SECURITY_MAIN_INDEX_6, RestrictedIndicesNames.INTERNAL_SECURITY_MAIN_INDEX_7); private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { - return new SecurityIndexManager.State(Instant.now(), true, true, true, null, concreteSecurityIndexName, indexStatus); + return new SecurityIndexManager.State( + Instant.now(), true, true, true, null, concreteSecurityIndexName, indexStatus, IndexMetaData.State.OPEN); } public void testCacheClearOnIndexHealthChange() { @@ -72,7 +74,7 @@ public class NativeRealmTests extends ESTestCase { // green to yellow or yellow to green previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - currentState = dummyState(previousState.indexStatus == ClusterHealthStatus.GREEN ? + currentState = dummyState(previousState.indexHealth == ClusterHealthStatus.GREEN ? 
ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); nativeRealm.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, numInvalidation.get()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index 3cca6cc4fd3..4dd1f13cf03 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -138,7 +139,12 @@ public class NativeRoleMappingStoreTests extends ESTestCase { } private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { - return new SecurityIndexManager.State(Instant.now(), true, true, true, null, concreteSecurityIndexName, indexStatus); + return indexState(true, indexStatus); + } + + private SecurityIndexManager.State indexState(boolean isUpToDate, ClusterHealthStatus healthStatus) { + return new SecurityIndexManager.State( + Instant.now(), isUpToDate, true, true, null, concreteSecurityIndexName, healthStatus, IndexMetaData.State.OPEN); } public void testCacheClearOnIndexHealthChange() { @@ -172,7 +178,7 @@ public class NativeRoleMappingStoreTests extends ESTestCase { // green to yellow or yellow to green previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - currentState = dummyState(previousState.indexStatus == ClusterHealthStatus.GREEN ? + currentState = dummyState(previousState.indexHealth == ClusterHealthStatus.GREEN ? 
ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); store.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, numInvalidation.get()); @@ -182,14 +188,10 @@ public class NativeRoleMappingStoreTests extends ESTestCase { final AtomicInteger numInvalidation = new AtomicInteger(0); final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation, true); - store.onSecurityIndexStateChange( - new SecurityIndexManager.State(Instant.now(), false, true, true, null, concreteSecurityIndexName, null), - new SecurityIndexManager.State(Instant.now(), true, true, true, null, concreteSecurityIndexName, null)); + store.onSecurityIndexStateChange(indexState(false, null), indexState(true, null)); assertEquals(1, numInvalidation.get()); - store.onSecurityIndexStateChange( - new SecurityIndexManager.State(Instant.now(), true, true, true, null, concreteSecurityIndexName, null), - new SecurityIndexManager.State(Instant.now(), false, true, true, null, concreteSecurityIndexName, null)); + store.onSecurityIndexStateChange(indexState(true, null), indexState(false, null)); assertEquals(2, numInvalidation.get()); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index a39545f3a9b..b4e0a6a22cf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -763,7 +763,12 @@ public class CompositeRolesStoreTests extends ESTestCase { } private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { - return new SecurityIndexManager.State(Instant.now(), true, true, true, null, concreteSecurityIndexName, indexStatus); + return dummyIndexState(true, indexStatus); + } + + public SecurityIndexManager.State dummyIndexState(boolean isIndexUpToDate, ClusterHealthStatus healthStatus) { + return new SecurityIndexManager.State( + Instant.now(), isIndexUpToDate, true, true, null, concreteSecurityIndexName, healthStatus, IndexMetaData.State.OPEN); } public void testCacheClearOnIndexHealthChange() { @@ -812,7 +817,7 @@ public class CompositeRolesStoreTests extends ESTestCase { // green to yellow or yellow to green previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - currentState = dummyState(previousState.indexStatus == ClusterHealthStatus.GREEN ? + currentState = dummyState(previousState.indexHealth == ClusterHealthStatus.GREEN ? 
ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); compositeRolesStore.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, numInvalidation.get()); @@ -837,14 +842,10 @@ public class CompositeRolesStoreTests extends ESTestCase { } }; - compositeRolesStore.onSecurityIndexStateChange( - new SecurityIndexManager.State(Instant.now(), false, true, true, null, concreteSecurityIndexName, null), - new SecurityIndexManager.State(Instant.now(), true, true, true, null, concreteSecurityIndexName, null)); + compositeRolesStore.onSecurityIndexStateChange(dummyIndexState(false, null), dummyIndexState(true, null)); assertEquals(1, numInvalidation.get()); - compositeRolesStore.onSecurityIndexStateChange( - new SecurityIndexManager.State(Instant.now(), true, true, true, null, concreteSecurityIndexName, null), - new SecurityIndexManager.State(Instant.now(), false, true, true, null, concreteSecurityIndexName, null)); + compositeRolesStore.onSecurityIndexStateChange(dummyIndexState(true, null), dummyIndexState(false, null)); assertEquals(2, numInvalidation.get()); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 3dd5395b1fe..8704f337deb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -155,8 +155,8 @@ public class SecurityIndexManagerTests extends ESTestCase { manager.clusterChanged(event(clusterStateBuilder)); assertTrue(listenerCalled.get()); - assertNull(previousState.get().indexStatus); - assertEquals(ClusterHealthStatus.GREEN, currentState.get().indexStatus); + assertNull(previousState.get().indexHealth); + assertEquals(ClusterHealthStatus.GREEN, currentState.get().indexHealth); // reset and call with no change to the index listenerCalled.set(false); @@ -191,8 +191,8 @@ public class SecurityIndexManagerTests extends ESTestCase { event = new ClusterChangedEvent("different index health", clusterStateBuilder.build(), previousClusterState); manager.clusterChanged(event); assertTrue(listenerCalled.get()); - assertEquals(ClusterHealthStatus.GREEN, previousState.get().indexStatus); - assertEquals(ClusterHealthStatus.RED, currentState.get().indexStatus); + assertEquals(ClusterHealthStatus.GREEN, previousState.get().indexHealth); + assertEquals(ClusterHealthStatus.RED, currentState.get().indexHealth); // swap prev and current listenerCalled.set(false); @@ -201,8 +201,8 @@ public class SecurityIndexManagerTests extends ESTestCase { event = new ClusterChangedEvent("different index health swapped", previousClusterState, clusterStateBuilder.build()); manager.clusterChanged(event); assertTrue(listenerCalled.get()); - assertEquals(ClusterHealthStatus.RED, previousState.get().indexStatus); - assertEquals(ClusterHealthStatus.GREEN, currentState.get().indexStatus); + assertEquals(ClusterHealthStatus.RED, previousState.get().indexHealth); + assertEquals(ClusterHealthStatus.GREEN, currentState.get().indexHealth); } public void testWriteBeforeStateNotRecovered() throws Exception { @@ -247,7 +247,7 @@ public class SecurityIndexManagerTests extends ESTestCase { assertThat(prepareRunnableCalled.get(), is(true)); } - public void testListeneredNotCalledBeforeStateNotRecovered() throws Exception { + public void 
testListenerNotCalledBeforeStateNotRecovered() throws Exception { final AtomicBoolean listenerCalled = new AtomicBoolean(false); manager.addIndexStateListener((prev, current) -> { listenerCalled.set(true); @@ -307,6 +307,31 @@ public class SecurityIndexManagerTests extends ESTestCase { assertTrue(manager.isIndexUpToDate()); } + public void testProcessClosedIndexState() throws Exception { + // Index initially exists + final ClusterState.Builder indexAvailable = createClusterState(RestrictedIndicesNames.INTERNAL_SECURITY_MAIN_INDEX_7, + RestrictedIndicesNames.SECURITY_MAIN_ALIAS, TEMPLATE_NAME, IndexMetaData.State.OPEN); + markShardsAvailable(indexAvailable); + + manager.clusterChanged(event(indexAvailable)); + assertThat(manager.indexExists(), is(true)); + assertThat(manager.isAvailable(), is(true)); + + // Now close it + final ClusterState.Builder indexClosed = createClusterState(RestrictedIndicesNames.INTERNAL_SECURITY_MAIN_INDEX_7, + RestrictedIndicesNames.SECURITY_MAIN_ALIAS, TEMPLATE_NAME, IndexMetaData.State.CLOSE); + if (randomBoolean()) { + // In old/mixed cluster versions closed indices have no routing table + indexClosed.routingTable(RoutingTable.EMPTY_ROUTING_TABLE); + } else { + markShardsAvailable(indexClosed); + } + + manager.clusterChanged(event(indexClosed)); + assertThat(manager.indexExists(), is(true)); + assertThat(manager.isAvailable(), is(false)); + } + private void assertInitialState() { assertThat(manager.indexExists(), Matchers.equalTo(false)); assertThat(manager.isAvailable(), Matchers.equalTo(false)); @@ -322,18 +347,23 @@ public class SecurityIndexManagerTests extends ESTestCase { } public static ClusterState.Builder createClusterState(String indexName, String aliasName, String templateName) throws IOException { - return createClusterState(indexName, aliasName, templateName, templateName, SecurityIndexManager.INTERNAL_MAIN_INDEX_FORMAT); + return createClusterState(indexName, aliasName, templateName, IndexMetaData.State.OPEN); + } + + public static ClusterState.Builder createClusterState(String indexName, String aliasName, String templateName, + IndexMetaData.State state) throws IOException { + return createClusterState(indexName, aliasName, templateName, templateName, SecurityIndexManager.INTERNAL_MAIN_INDEX_FORMAT, state); } public static ClusterState.Builder createClusterState(String indexName, String aliasName, String templateName, int format) throws IOException { - return createClusterState(indexName, aliasName, templateName, templateName, format); + return createClusterState(indexName, aliasName, templateName, templateName, format, IndexMetaData.State.OPEN); } private static ClusterState.Builder createClusterState(String indexName, String aliasName, String templateName, String buildMappingFrom, - int format) throws IOException { + int format, IndexMetaData.State state) throws IOException { IndexTemplateMetaData.Builder templateBuilder = getIndexTemplateMetaData(templateName); - IndexMetaData.Builder indexMeta = getIndexMetadata(indexName, aliasName, buildMappingFrom, format); + IndexMetaData.Builder indexMeta = getIndexMetadata(indexName, aliasName, buildMappingFrom, format, state); MetaData.Builder metaDataBuilder = new MetaData.Builder(); metaDataBuilder.put(templateBuilder); @@ -354,7 +384,8 @@ public class SecurityIndexManagerTests extends ESTestCase { .build(); } - private static IndexMetaData.Builder getIndexMetadata(String indexName, String aliasName, String templateName, int format) + private static IndexMetaData.Builder getIndexMetadata(String 
indexName, String aliasName, String templateName, int format, + IndexMetaData.State state) throws IOException { IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); indexMetaData.settings(Settings.builder() @@ -364,6 +395,7 @@ public class SecurityIndexManagerTests extends ESTestCase { .put(IndexMetaData.INDEX_FORMAT_SETTING.getKey(), format) .build()); indexMetaData.putAlias(AliasMetaData.builder(aliasName).build()); + indexMetaData.state(state); final Map mappings = getTemplateMappings(templateName); for (Map.Entry entry : mappings.entrySet()) { indexMetaData.putMapping(entry.getKey(), entry.getValue()); From 8de3a88205b33e9276555dfb6eed15bceee401fe Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 4 Jun 2019 14:25:43 +1000 Subject: [PATCH 048/210] Log the status of security on license change (#42741) Whether security is enabled/disabled is dependent on the combination of the node settings and the cluster license. This commit adds a license state listener that logs when the license change causes security to switch state (or to be initialised). This is primarily useful for diagnosing cluster formation issues. Backport of: #42488 --- .../elasticsearch/test/MockLogAppender.java | 2 +- .../xpack/security/Security.java | 2 + .../support/SecurityStatusChangeListener.java | 45 +++++++ .../SecurityStatusChangeListenerTests.java | 115 ++++++++++++++++++ 4 files changed, 163 insertions(+), 1 deletion(-) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityStatusChangeListener.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityStatusChangeListenerTests.java diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index c6a5d77faf5..e9c53ed8967 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -117,7 +117,7 @@ public class MockLogAppender extends AbstractAppender { @Override public void assertMatched() { - assertThat("expected to see " + name + " but did not", saw, equalTo(false)); + assertThat("expected not to see " + name + " but did", saw, equalTo(false)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 9ba3bdab21f..72a9f780065 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -226,6 +226,7 @@ import org.elasticsearch.xpack.security.rest.action.user.RestHasPrivilegesAction import org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction; import org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction; import org.elasticsearch.xpack.security.support.SecurityIndexManager; +import org.elasticsearch.xpack.security.support.SecurityStatusChangeListener; import org.elasticsearch.xpack.security.transport.SecurityHttpSettings; import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterceptor; import org.elasticsearch.xpack.security.transport.filter.IPFilter; @@ -461,6 +462,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw // to keep things simple, just invalidate all cached entries on license change. 
this happens so rarely that the impact should be // minimal getLicenseState().addListener(allRolesStore::invalidateAll); + getLicenseState().addListener(new SecurityStatusChangeListener(getLicenseState())); final AuthenticationFailureHandler failureHandler = createAuthenticationFailureHandler(realms); authcService.set(new AuthenticationService(settings, realms, auditTrailService, failureHandler, threadPool, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityStatusChangeListener.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityStatusChangeListener.java new file mode 100644 index 00000000000..ddc41561afa --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityStatusChangeListener.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security.support; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.license.LicenseStateListener; +import org.elasticsearch.license.XPackLicenseState; + +import java.util.Objects; + +/** + * A listener for license state changes that provides log messages when a license change + * causes security to switch between enable and disabled (or vice versa). + */ +public class SecurityStatusChangeListener implements LicenseStateListener { + + private final Logger logger; + private final XPackLicenseState licenseState; + private Boolean securityEnabled; + + public SecurityStatusChangeListener(XPackLicenseState licenseState) { + this.logger = LogManager.getLogger(getClass()); + this.licenseState = licenseState; + this.securityEnabled = null; + } + + /** + * This listener will not be registered if security has been explicitly disabled, so we only need to account for dynamic changes due + * to changes in the applied license. + */ + @Override + public synchronized void licenseStateChanged() { + final boolean newState = licenseState.isSecurityAvailable() && licenseState.isSecurityDisabledByLicenseDefaults() == false; + // old state might be null (undefined) so do Object comparison + if (Objects.equals(newState, securityEnabled) == false) { + logger.info("Active license is now [{}]; Security is {}", licenseState.getOperationMode(), newState ? "enabled" : "disabled"); + this.securityEnabled = newState; + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityStatusChangeListenerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityStatusChangeListenerTests.java new file mode 100644 index 00000000000..da18d5dc902 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityStatusChangeListenerTests.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.security.support; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.junit.After; +import org.junit.Before; +import org.mockito.Mockito; + +import static org.mockito.Mockito.when; + +public class SecurityStatusChangeListenerTests extends ESTestCase { + + private XPackLicenseState licenseState; + private SecurityStatusChangeListener listener; + private MockLogAppender logAppender; + private Logger listenerLogger; + + @Before + public void setup() throws IllegalAccessException { + licenseState = Mockito.mock(XPackLicenseState.class); + when(licenseState.isSecurityAvailable()).thenReturn(true); + + listener = new SecurityStatusChangeListener(licenseState); + + logAppender = new MockLogAppender(); + logAppender.start(); + listenerLogger = LogManager.getLogger(listener.getClass()); + Loggers.addAppender(listenerLogger, logAppender); + } + + @After + public void cleanup() { + Loggers.removeAppender(listenerLogger, logAppender); + logAppender.stop(); + } + + public void testSecurityEnabledToDisabled() { + when(licenseState.isSecurityDisabledByLicenseDefaults()).thenReturn(false); + + when(licenseState.getOperationMode()).thenReturn(License.OperationMode.GOLD); + logAppender.addExpectation(new MockLogAppender.SeenEventExpectation( + "initial change", + listener.getClass().getName(), + Level.INFO, + "Active license is now [GOLD]; Security is enabled" + )); + listener.licenseStateChanged(); + + when(licenseState.getOperationMode()).thenReturn(License.OperationMode.PLATINUM); + logAppender.addExpectation(new MockLogAppender.UnseenEventExpectation( + "no-op change", + listener.getClass().getName(), + Level.INFO, + "Active license is now [PLATINUM]; Security is enabled" + )); + + when(licenseState.isSecurityDisabledByLicenseDefaults()).thenReturn(true); + when(licenseState.getOperationMode()).thenReturn(License.OperationMode.BASIC); + logAppender.addExpectation(new MockLogAppender.SeenEventExpectation( + "change to basic", + listener.getClass().getName(), + Level.INFO, + "Active license is now [BASIC]; Security is disabled" + )); + listener.licenseStateChanged(); + + logAppender.assertAllExpectationsMatched(); + } + + public void testSecurityDisabledToEnabled() { + when(licenseState.isSecurityDisabledByLicenseDefaults()).thenReturn(true); + + when(licenseState.getOperationMode()).thenReturn(License.OperationMode.TRIAL); + logAppender.addExpectation(new MockLogAppender.SeenEventExpectation( + "initial change", + listener.getClass().getName(), + Level.INFO, + "Active license is now [TRIAL]; Security is disabled" + )); + listener.licenseStateChanged(); + + when(licenseState.getOperationMode()).thenReturn(License.OperationMode.BASIC); + logAppender.addExpectation(new MockLogAppender.UnseenEventExpectation( + "no-op change", + listener.getClass().getName(), + Level.INFO, + "Active license is now [BASIC]; Security is disabled" + )); + + when(licenseState.isSecurityDisabledByLicenseDefaults()).thenReturn(false); + when(licenseState.getOperationMode()).thenReturn(License.OperationMode.PLATINUM); + logAppender.addExpectation(new MockLogAppender.SeenEventExpectation( + "change to platinum", + listener.getClass().getName(), + Level.INFO, + "Active 
license is now [PLATINUM]; Security is enabled" + )); + listener.licenseStateChanged(); + + logAppender.assertAllExpectationsMatched(); + } + +} From 9f470c20ed81e1ba6010cf5f3ac723a6aa9e0b9e Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 3 Jun 2019 17:20:47 +0100 Subject: [PATCH 049/210] More improvements to cluster coordination docs (#42799) This commit addresses a few more frequently-asked questions: * clarifies that bootstrapping doesn't happen even after a full cluster restart. * removes the example that uses IP addresses, to try and further encourage the use of node names for bootstrapping. * clarifies that auto-bootstrapping might form different clusters on different hosts, and gives a process for starting again if this wasn't what you wanted. * adds the "do not stop half-or-more of the master-eligible nodes" slogan that was notably absent. * reformats one of the console examples to a narrower width --- .../discovery/adding-removing-nodes.asciidoc | 32 ++++++++------ .../modules/discovery/bootstrapping.asciidoc | 44 ++++++++++++------- 2 files changed, 48 insertions(+), 28 deletions(-) diff --git a/docs/reference/modules/discovery/adding-removing-nodes.asciidoc b/docs/reference/modules/discovery/adding-removing-nodes.asciidoc index ccc0e991253..2a62bb5e49d 100644 --- a/docs/reference/modules/discovery/adding-removing-nodes.asciidoc +++ b/docs/reference/modules/discovery/adding-removing-nodes.asciidoc @@ -35,30 +35,36 @@ four of the nodes at once: to do so would leave only three nodes remaining, which is less than half of the voting configuration, which means the cluster cannot take any further actions. +More precisely, if you shut down half or more of the master-eligible nodes all +at the same time then the cluster will normally become unavailable. If this +happens then you can bring the cluster back online by starting the removed +nodes again. + As long as there are at least three master-eligible nodes in the cluster, as a general rule it is best to remove nodes one-at-a-time, allowing enough time for the cluster to <> the voting configuration and adapt the fault tolerance level to the new set of nodes. If there are only two master-eligible nodes remaining then neither node can be -safely removed since both are required to reliably make progress. You must first -inform Elasticsearch that one of the nodes should not be part of the voting -configuration, and that the voting power should instead be given to other nodes. -You can then take the excluded node offline without preventing the other node -from making progress. A node which is added to a voting configuration exclusion -list still works normally, but Elasticsearch tries to remove it from the voting -configuration so its vote is no longer required. Importantly, Elasticsearch -will never automatically move a node on the voting exclusions list back into the -voting configuration. Once an excluded node has been successfully +safely removed since both are required to reliably make progress. To remove one +of these nodes you must first inform {es} that it should not be part of the +voting configuration, and that the voting power should instead be given to the +other node. You can then take the excluded node offline without preventing the +other node from making progress. A node which is added to a voting +configuration exclusion list still works normally, but {es} tries to remove it +from the voting configuration so its vote is no longer required. 
Importantly, +{es} will never automatically move a node on the voting exclusions list back +into the voting configuration. Once an excluded node has been successfully auto-reconfigured out of the voting configuration, it is safe to shut it down without affecting the cluster's master-level availability. A node can be added -to the voting configuration exclusion list using the <> API. For example: +to the voting configuration exclusion list using the +<> API. For example: [source,js] -------------------------------------------------- -# Add node to voting configuration exclusions list and wait for the system to -# auto-reconfigure the node out of the voting configuration up to the default -# timeout of 30 seconds +# Add node to voting configuration exclusions list and wait for the system +# to auto-reconfigure the node out of the voting configuration up to the +# default timeout of 30 seconds POST /_cluster/voting_config_exclusions/node_name # Add node to voting configuration exclusions list and wait for diff --git a/docs/reference/modules/discovery/bootstrapping.asciidoc b/docs/reference/modules/discovery/bootstrapping.asciidoc index 2b17af17ec5..cc7cb0ea912 100644 --- a/docs/reference/modules/discovery/bootstrapping.asciidoc +++ b/docs/reference/modules/discovery/bootstrapping.asciidoc @@ -6,8 +6,9 @@ set of <> to be explicitly defined on one or more of the master-eligible nodes in the cluster. This is known as _cluster bootstrapping_. This is only required the very first time the cluster starts up: nodes that have already joined a cluster store this information in their -data folder and freshly-started nodes that are joining an existing cluster -obtain this information from the cluster's elected master. +data folder for use in a <>, and +freshly-started nodes that are joining a running cluster obtain this +information from the cluster's elected master. The initial set of master-eligible nodes is defined in the <>. This should be @@ -58,19 +59,6 @@ cluster.initial_master_nodes: - master-c -------------------------------------------------- -If it is not possible to use the names of the nodes then you can also use IP -addresses, or IP addresses and ports, or even a mix of IP addresses and node -names: - -[source,yaml] --------------------------------------------------- -cluster.initial_master_nodes: - - 10.0.10.101 - - 10.0.10.102:9300 - - 10.0.10.102:9301 - - master-node-name --------------------------------------------------- - Like all node settings, it is also possible to specify the initial set of master nodes on the command-line that is used to start Elasticsearch: @@ -139,3 +127,29 @@ in the <>: * `discovery.seed_providers` * `discovery.seed_hosts` * `cluster.initial_master_nodes` + +[NOTE] +================================================== + +[[modules-discovery-bootstrap-cluster-joining]] If you start an {es} node +without configuring these settings then it will start up in development mode and +auto-bootstrap itself into a new cluster. If you start some {es} nodes on +different hosts then by default they will not discover each other and will form +a different cluster on each host. {es} will not merge separate clusters together +after they have formed, even if you subsequently try and configure all the nodes +into a single cluster. This is because there is no way to merge these separate +clusters together without a risk of data loss. You can tell that you have formed +separate clusters by checking the cluster UUID reported by `GET /` on each node. 
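+For example, the following request returns the UUID of the cluster that the
+node has joined. If two nodes report different `cluster_uuid` values then they
+have formed separate clusters. The response shown is abbreviated and the UUID
+value is illustrative only:
+
+[source,js]
+--------------------------------------------------
+GET /
+--------------------------------------------------
+// CONSOLE
+
+[source,js]
+--------------------------------------------------
+{
+  "cluster_name" : "elasticsearch",
+  "cluster_uuid" : "3LbvlLyBTXO0mAMZftBCSw",
+  ...
+}
+--------------------------------------------------
+// NOTCONSOLE
+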
+If you intended to form a single cluster then you should start again: + +* Take a <> of each of the single-host clusters if + you do not want to lose any data that they hold. Note that each cluster must + use its own snapshot repository. +* Shut down all the nodes. +* Completely wipe each node by deleting the contents of their + <>. +* Configure `cluster.initial_master_nodes` as described above. +* Restart all the nodes and verify that they have formed a single cluster. +* <> any snapshots as required. + +================================================== From 928f49992f38660f1331c8d46bf5e415e4cccc54 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 4 Jun 2019 19:48:37 +1000 Subject: [PATCH 050/210] Don't require TLS for single node clusters (#42830) This commit removes the TLS cluster join validator. This validator existed to prevent v6.x nodes (which mandated TLS) from joining an existing cluster of v5.x nodes (which did not mandate TLS) unless the 6.x node (and by implication the 5.x nodes) was configured to use TLS. Since 7.x nodes cannot talk to 5.x nodes, this validator is no longer needed. Removing the validator solves a problem where single node clusters that were bound to local interfaces were incorrectly requiring TLS when they recovered cluster state and joined their own cluster. Backport of: #42826 --- .../xpack/security/Security.java | 28 +--------- .../xpack/security/SecurityTests.java | 56 ------------------- 2 files changed, 1 insertion(+), 83 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 72a9f780065..abb02d617ab 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -40,7 +40,6 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpServerTransport; @@ -986,37 +985,12 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw @Override public BiConsumer getJoinValidator() { if (enabled) { - return new ValidateTLSOnJoin(XPackSettings.TRANSPORT_SSL_ENABLED.get(settings), - DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings), settings) - .andThen(new ValidateUpgradedSecurityIndex()) - .andThen(new ValidateLicenseCanBeDeserialized()) + return new ValidateUpgradedSecurityIndex() .andThen(new ValidateLicenseForFIPS(XPackSettings.FIPS_MODE_ENABLED.get(settings))); } return null; } - static final class ValidateTLSOnJoin implements BiConsumer { - private final boolean isTLSEnabled; - private final String discoveryType; - private final Settings settings; - - ValidateTLSOnJoin(boolean isTLSEnabled, String discoveryType, Settings settings) { - this.isTLSEnabled = isTLSEnabled; - this.discoveryType = discoveryType; - this.settings = settings; - } - - @Override - public void accept(DiscoveryNode node, ClusterState state) { - License license = LicenseService.getLicense(state.metaData()); - if (isTLSEnabled == false && "single-node".equals(discoveryType) == false - && XPackLicenseState.isTransportTlsRequired(license, settings)) 
{ - throw new IllegalStateException("Transport TLS ([" + XPackSettings.TRANSPORT_SSL_ENABLED.getKey() + - "]) is required for license type [" + license.operationMode().description() + "] when security is enabled"); - } - } - } - static final class ValidateUpgradedSecurityIndex implements BiConsumer { @Override public void accept(DiscoveryNode node, ClusterState state) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 2a2178a0bf7..92130ca5f4e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -65,7 +65,6 @@ import java.util.function.Predicate; import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; -import static org.elasticsearch.discovery.DiscoveryModule.ZEN2_DISCOVERY_TYPE; import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_MAIN_INDEX_FORMAT; import static org.hamcrest.Matchers.containsString; @@ -243,61 +242,6 @@ public class SecurityTests extends ESTestCase { assertNull(joinValidator); } - public void testTLSJoinValidator() throws Exception { - createComponents(Settings.EMPTY); - BiConsumer joinValidator = security.getJoinValidator(); - assertNotNull(joinValidator); - DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT); - joinValidator.accept(node, ClusterState.builder(ClusterName.DEFAULT).build()); - int numIters = randomIntBetween(1, 10); - for (int i = 0; i < numIters; i++) { - boolean tlsOn = randomBoolean(); - boolean securityExplicitlyEnabled = randomBoolean(); - String discoveryType = randomFrom("single-node", ZEN2_DISCOVERY_TYPE, ZEN2_DISCOVERY_TYPE, randomAlphaOfLength(4)); - - final Settings settings; - if (securityExplicitlyEnabled) { - settings = Settings.builder().put("xpack.security.enabled", true).build(); - } else { - settings = Settings.EMPTY; - } - Security.ValidateTLSOnJoin validator = new Security.ValidateTLSOnJoin(tlsOn, discoveryType, settings); - MetaData.Builder builder = MetaData.builder(); - License.OperationMode licenseMode = randomFrom(License.OperationMode.values()); - License license = TestUtils.generateSignedLicense(licenseMode.description(), TimeValue.timeValueHours(24)); - TestUtils.putLicense(builder, license); - ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metaData(builder.build()).build(); - - final boolean expectFailure; - switch (licenseMode) { - case PLATINUM: - case GOLD: - case STANDARD: - expectFailure = tlsOn == false && "single-node".equals(discoveryType) == false; - break; - case BASIC: - expectFailure = tlsOn == false && "single-node".equals(discoveryType) == false && securityExplicitlyEnabled; - break; - case MISSING: - case TRIAL: - expectFailure = false; - break; - default: - throw new AssertionError("unknown operation mode [" + license.operationMode() + "]"); - } - logger.info("Test TLS join; Lic:{} TLS:{} Disco:{} Settings:{} ; Expect Failure: {}", - licenseMode, tlsOn, discoveryType, settings.toDelimitedString(','), expectFailure); - if (expectFailure) { - IllegalStateException ise = expectThrows(IllegalStateException.class, () -> validator.accept(node, state)); - 
assertEquals("Transport TLS ([xpack.security.transport.ssl.enabled]) is required for license type [" - + license.operationMode().description() + "] when security is enabled", ise.getMessage()); - } else { - validator.accept(node, state); - } - validator.accept(node, ClusterState.builder(ClusterName.DEFAULT).metaData(MetaData.builder().build()).build()); - } - } - public void testJoinValidatorForLicenseDeserialization() throws Exception { DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), VersionUtils.randomVersionBetween(random(), null, Version.V_6_3_0)); From 440ec4d9f53d42155a96b1ed16ee21c376c87d25 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 4 Jun 2019 14:08:41 +0300 Subject: [PATCH 051/210] [Backport 7.x] OpenID Connect realm guide (#42836) This commit adds a configuration guide for the newly introduced OpenID Connect realm. The guide is similar to the style of the SAML Guide and shares certain parts where applicable (role mapping) It also contains a short section on how the realm can be used for authenticating users without Kibana. Co-Authored-By: Lisa Cawley Backport of #41423 and #42555 --- x-pack/docs/build.gradle | 13 + x-pack/docs/en/rest-api/security.asciidoc | 4 +- .../rest-api/security/authenticate.asciidoc | 4 +- .../security/oidc-authenticate-api.asciidoc | 2 +- .../security/oidc-logout-api.asciidoc | 2 +- .../oidc-prepare-authentication-api.asciidoc | 20 +- .../authentication/oidc-guide.asciidoc | 685 ++++++++++++++++++ 7 files changed, 714 insertions(+), 16 deletions(-) create mode 100644 x-pack/docs/en/security/authentication/oidc-guide.asciidoc diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index d33ccddcf50..efe08ff3b29 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -31,6 +31,7 @@ project.copyRestSpec.from(xpackResources) { } testClusters.integTest { + extraConfigFile 'op-jwks.json', xpackProject('test:idp-fixture').file("oidc/op-jwks.json") setting 'xpack.security.enabled', 'true' setting 'xpack.security.authc.api_key.enabled', 'true' setting 'xpack.security.authc.token.enabled', 'true' @@ -38,6 +39,18 @@ testClusters.integTest { setting 'xpack.monitoring.exporters._local.type', 'local' setting 'xpack.monitoring.exporters._local.enabled', 'false' setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.authc.realms.file.file.order', '0' + setting 'xpack.security.authc.realms.native.native.order', '1' + setting 'xpack.security.authc.realms.oidc.oidc1.order', '2' + setting 'xpack.security.authc.realms.oidc.oidc1.op.issuer', 'http://127.0.0.1:8080' + setting 'xpack.security.authc.realms.oidc.oidc1.op.authorization_endpoint', "http://127.0.0.1:8080/c2id-login" + setting 'xpack.security.authc.realms.oidc.oidc1.op.token_endpoint', "http://127.0.0.1:8080/c2id/token" + setting 'xpack.security.authc.realms.oidc.oidc1.op.jwkset_path', 'op-jwks.json' + setting 'xpack.security.authc.realms.oidc.oidc1.rp.redirect_uri', 'https://my.fantastic.rp/cb' + setting 'xpack.security.authc.realms.oidc.oidc1.rp.client_id', 'elasticsearch-rp' + keystore 'xpack.security.authc.realms.oidc.oidc1.rp.client_secret', 'b07efb7a1cf6ec9462afe7b6d3ab55c6c7880262aa61ac28dded292aca47c9a2' + setting 'xpack.security.authc.realms.oidc.oidc1.rp.response_type', 'id_token' + setting 'xpack.security.authc.realms.oidc.oidc1.claims.principal', 'sub' user username: 'test_admin' } diff --git a/x-pack/docs/en/rest-api/security.asciidoc b/x-pack/docs/en/rest-api/security.asciidoc index c04bae90801..abad1e38d77 
100644 --- a/x-pack/docs/en/rest-api/security.asciidoc +++ b/x-pack/docs/en/rest-api/security.asciidoc @@ -76,6 +76,8 @@ native realm: * <> * <> +[float] +[[security-openid-apis]] === OpenID Connect You can use the following APIs to authenticate users against an OpenID Connect @@ -110,7 +112,7 @@ include::security/get-users.asciidoc[] include::security/has-privileges.asciidoc[] include::security/invalidate-api-keys.asciidoc[] include::security/invalidate-tokens.asciidoc[] -include::security/ssl.asciidoc[] include::security/oidc-prepare-authentication-api.asciidoc[] include::security/oidc-authenticate-api.asciidoc[] include::security/oidc-logout-api.asciidoc[] +include::security/ssl.asciidoc[] diff --git a/x-pack/docs/en/rest-api/security/authenticate.asciidoc b/x-pack/docs/en/rest-api/security/authenticate.asciidoc index 51b0d644194..d23c410a623 100644 --- a/x-pack/docs/en/rest-api/security/authenticate.asciidoc +++ b/x-pack/docs/en/rest-api/security/authenticate.asciidoc @@ -46,11 +46,11 @@ The following example output provides information about the "rdeniro" user: "metadata": { }, "enabled": true, "authentication_realm": { - "name" : "default_file", + "name" : "file", "type" : "file" }, "lookup_realm": { - "name" : "default_file", + "name" : "file", "type" : "file" } } diff --git a/x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc b/x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc index 0efb2b23145..bc60e4fbf23 100644 --- a/x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc +++ b/x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc @@ -51,7 +51,7 @@ POST /_security/oidc/authenticate } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] +// TEST[catch:unauthorized] The following example output contains the access token that was generated in response, the amount of time (in seconds) that the token expires in, the type, and the refresh token: diff --git a/x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc b/x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc index 6f5288a135f..cb8840ca535 100644 --- a/x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc +++ b/x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc @@ -39,7 +39,7 @@ POST /_security/oidc/logout } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] +// TEST[catch:unauthorized] The following example output of the response contains the URI pointing to the End Session Endpoint of the OpenID Connect Provider with all the parameters of the Logout Request, as HTTP GET parameters diff --git a/x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc b/x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc index aeb400ce97e..a6ce410be6e 100644 --- a/x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc +++ b/x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc @@ -57,20 +57,19 @@ POST /_security/oidc/prepare } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] - The following example output of the response contains the URI pointing to the Authorization Endpoint of the OpenID Connect Provider with all the parameters of the Authentication Request, as HTTP GET parameters [source,js] -------------------------------------------------- { - "redirect" : 
"https://op-provider.org/login?scope=openid&response_type=code&redirect_uri=http%3A%2F%2Foidc-kibana.elastic.co%3A5603%2Fkmi%2Fapi%2Fsecurity%2Fv1%2Foidc&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I&nonce=WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM&client_id=0o43gasov3TxMWJOt839", + "redirect" : "http://127.0.0.1:8080/c2id-login?scope=openid&response_type=id_token&redirect_uri=https%3A%2F%2Fmy.fantastic.rp%2Fcb&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I&nonce=WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM&client_id=elasticsearch-rp", "state" : "4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I", "nonce" : "WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM" } -------------------------------------------------- -// NOTCONSOLE +// TESTRESPONSE[s/4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I/\$\{body.state\}/] +// TESTRESPONSE[s/WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM/\$\{body.nonce\}/] The following example generates an authentication request for the OpenID Connect Realm `oidc1`, where the values for the state and the nonce have been generated by the client @@ -85,7 +84,6 @@ POST /_security/oidc/prepare } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] The following example output of the response contains the URI pointing to the Authorization Endpoint of the OpenID Connect Provider with all the parameters of the Authentication Request, as HTTP GET parameters @@ -93,12 +91,12 @@ OpenID Connect Provider with all the parameters of the Authentication Request, a [source,js] -------------------------------------------------- { - "redirect" : "https://op-provider.org/login?scope=openid&response_type=code&redirect_uri=http%3A%2F%2Foidc-kibana.elastic.co%3A5603%2Fkmi%2Fapi%2Fsecurity%2Fv1%2Foidc&state=lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO&nonce=zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5&client_id=0o43gasov3TxMWJOt839", + "redirect" : "http://127.0.0.1:8080/c2id-login?scope=openid&response_type=id_token&redirect_uri=https%3A%2F%2Fmy.fantastic.rp%2Fcb&state=lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO&nonce=zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5&client_id=elasticsearch-rp", "state" : "lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO", "nonce" : "zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5" } -------------------------------------------------- -// NOTCONSOLE +// TESTRESPONSE The following example generates an authentication request for a 3rd party initiated single sign on, specifying the issuer that should be used for matching the appropriate OpenID Connect Authentication realm @@ -107,12 +105,11 @@ issuer that should be used for matching the appropriate OpenID Connect Authentic -------------------------------------------------- POST /_security/oidc/prepare { - "issuer" : "https://op-issuer.org:8800", + "iss" : "http://127.0.0.1:8080", "login_hint": "this_is_an_opaque_string" } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] The following example output of the response contains the URI pointing to the Authorization Endpoint of the OpenID Connect Provider with all the parameters of the Authentication Request, as HTTP GET parameters @@ -120,9 +117,10 @@ OpenID Connect Provider with all the parameters of the Authentication Request, a [source,js] -------------------------------------------------- { - "redirect" : 
"https://op-provider.org/login?scope=openid&response_type=code&redirect_uri=http%3A%2F%2Foidc-kibana.elastic.co%3A5603%2Fkmi%2Fapi%2Fsecurity%2Fv1%2Foidc&state=lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO&nonce=zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5&client_id=0o43gasov3TxMWJOt839&login_hint=this_is_an_opaque_string", + "redirect" : "http://127.0.0.1:8080/c2id-login?login_hint=this_is_an_opaque_string&scope=openid&response_type=id_token&redirect_uri=https%3A%2F%2Fmy.fantastic.rp%2Fcb&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I&nonce=WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM&client_id=elasticsearch-rp", "state" : "4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I", "nonce" : "WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM" } -------------------------------------------------- -// NOTCONSOLE \ No newline at end of file +// TESTRESPONSE[s/4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I/\$\{body.state\}/] +// TESTRESPONSE[s/WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM/\$\{body.nonce\}/] \ No newline at end of file diff --git a/x-pack/docs/en/security/authentication/oidc-guide.asciidoc b/x-pack/docs/en/security/authentication/oidc-guide.asciidoc new file mode 100644 index 00000000000..d7d68ada0e7 --- /dev/null +++ b/x-pack/docs/en/security/authentication/oidc-guide.asciidoc @@ -0,0 +1,685 @@ +[role="xpack"] +[[oidc-guide]] + +== Configuring single sign-on to the {stack} using OpenID Connect + +The Elastic Stack supports single sign-on (SSO) using OpenID Connect via {kib} using +{es} as the backend service that holds most of the functionality. {kib} and {es} +together represent an OpenID Connect Relying Party (RP) that supports the Authorization +Code Flow as this is defined in the OpenID Connect specification. + +This guide assumes that you have an OpenID Connect Provider where the +Elastic Stack Relying Party will be registered. + +NOTE: The OpenID Connect realm support in {kib} is designed with the expectation that it +will be the primary authentication method for the users of that {kib} instance. The +<> section describes what this entails and how you can set it up to support +other realms if necessary. + +[[oidc-guide-op]] +=== The OpenID Connect Provider + +The OpenID Connect Provider (OP) is the entity in OpenID Connect that is responsible for +authenticating the user and for granting the necessary tokens with the authentication and +user information to be consumed by the Relying Parties. + +In order for the Elastic Stack to be able use your OpenID Connect Provider for authentication, +a trust relationship needs to be established between the OP and the RP. In the OpenID Connect +Provider, this means registering the RP as a client. OpenID Connect defines a dynamic client +registration protocol but this is usually geared towards real-time client registration and +not the trust establishment process for cross security domain single sign on. All OPs will +also allow for the manual registration of an RP as a client, via a user interface or (less often) +via the consumption of a metadata document. + +The process for registering the Elastic Stack RP will be different from OP to OP and following +the provider's relevant documentation is prudent. The information for the +RP that you commonly need to provide for registration are the following: + +- `Relying Party Name`: An arbitrary identifier for the relying party. Neither the specification +nor the Elastic Stack implementation impose any constraints on this value. 
+- `Redirect URI`: This is the URI where the OP will redirect the user's browser after authentication. The +appropriate value for this will depend on your setup and whether or not {kib} sits behind a proxy or +load balancer. It will typically be +$\{kibana-url}/api/security/v1/oidc+ where _$\{kibana-url}_ +is the base URL for your {kib} instance. You might also see this called `Callback URI`. + +At the end of the registration process, the OP will assign a Client Identifier and a Client Secret for the RP ({stack}) to use. +Note these two values as they will be used in the {es} configuration. + +[[oidc-guide-authentication]] +=== Configure {es} for OpenID Connect authentication + +The following is a summary of the configuration steps required in order to enable authentication +using OpenID Connect in {es}: + +. <> +. <> +. <> +. <> + +[[oidc-enable-http]] +==== Enable TLS for HTTP + +If your {es} cluster is operating in production mode, then you must +configure the HTTP interface to use SSL/TLS before you can enable OpenID Connect +authentication. + +For more information, see +{ref}/configuring-tls.html#tls-http[Encrypting HTTP Client Communications]. + +[[oidc-enable-token]] +==== Enable the token service + +The {es} OpenID Connect implementation makes use of the {es} Token Service. This service +is automatically enabled if you configure TLS on the HTTP interface, and can be +explicitly configured by including the following in your `elasticsearch.yml` file: + +[source, yaml] +------------------------------------------------------------ +xpack.security.authc.token.enabled: true +------------------------------------------------------------ + +[[oidc-create-realm]] +==== Create an OpenID Connect realm + +OpenID Connect based authentication is enabled by configuring the appropriate realm within +the authentication chain for {es}. + +This realm has a few mandatory settings, and a number of optional settings. +The available settings are described in detail in the +{ref}/security-settings.html#ref-oidc-settings[Security settings in {es}]. This +guide will explore the most common settings. + +Create an OpenID Connect (the realm type is `oidc`) realm in your `elasticsearch.yml` file +similar to what is shown below: + +NOTE: The values used below are meant to be an example and are not intended to apply to +every use case. The details below the configuration snippet provide insights and suggestions +to help you pick the proper values, depending on your OP configuration. + +[source, yaml] +------------------------------------------------------------------------------------- +xpack.security.authc.realms.oidc.oidc1: + order: 2 + rp.client_id: "the_client_id" + rp.response_type: code + rp.redirect_uri: "https://kibana.example.org:5601/api/security/v1/oidc" + op.issuer: "https://op.example.org" + op.authorization_endpoint: "https://op.example.org/oauth2/v1/authorize" + op.token_endpoint: "https://op.example.org/oauth2/v1/token" + op.jwkset_path: oidc/jwkset.json + op.userinfo_endpoint: "https://op.example.org/oauth2/v1/userinfo" + op.endsession_endpoint: "https://op.example.org/oauth2/v1/logout" + rp.post_logout_redirect_uri: "https://kibana.example.org:5601/logged_out" + claims.principal: sub + claims.groups: "http://example.info/claims/groups" +------------------------------------------------------------------------------------- + +The configuration values used in the example above are: + +xpack.security.authc.realms.oidc.oidc1:: + This defines a new `oidc` authentication realm named "oidc1". 
+ See <> for more explanation of realms. + +order:: + You should define a unique order on each realm in your authentication chain. + It is recommended that the OpenID Connect realm be at the bottom of your authentication + chain (that is, that it has the _highest_ order). + +rp.client_id:: + This, usually opaque, arbitrary string, is the Client Identifier that was assigned to the Elastic Stack RP by the OP upon + registration. + +rp.response_type:: + This is an identifier that controls which OpenID Connect authentication flow this RP supports and also + which flow this RP requests the OP should follow. Supported values are + - `code`, which means that the RP wants to use the Authorization Code flow. If your OP supports the + Authorization Code flow, you should select this instead of the Implicit Flow. + - `id_token token` which means that the RP wants to use the Implicit flow and we also request an oAuth2 + access token from the OP, that we can potentially use for follow up requests ( UserInfo ). This + should be selected if the OP offers a UserInfo endpoint in its configuration, or if you know that + the claims you will need to use for role mapping are not available in the ID Token. + - `id_token` which means that the RP wants to use the Implicit flow, but is not interested in getting + an oAuth2 token too. Select this if you are certain that all necessary claims will be contained in + the ID Token or if the OP doesn't offer a User Info endpoint. + +rp.redirect_uri:: + The redirect URI where the OP will redirect the browser after authentication. This needs to be + _exactly_ the same as the one <> and will + typically be +$\{kibana-url}/api/security/v1/oidc+ where _$\{kibana-url}_ is the base URL for your {kib} instance + +op.issuer:: + A verifiable Identifier for your OpenID Connect Provider. An Issuer Identifier is usually a case sensitive URL. + The value for this setting should be provided by your OpenID Connect Provider. + +op.authorization_endpoint:: + The URL for the Authorization Endpoint in the OP. This is where the user's browser + will be redirected to start the authentication process. The value for this setting should be provided by your + OpenID Connect Provider. + +op.token_endpoint:: + The URL for the Token Endpoint in the OpenID Connect Provider. This is the endpoint where + {es} will send a request to exchange the code for an ID Token, in the case where the Authorization Code + flow is used. The value for this setting should be provided by your OpenID Connect Provider. + +op.jwkset_path:: + The path to a file or a URL containing a JSON Web Key Set with the key material that the OpenID Connect + Provider uses for signing tokens and claims responses. If a path is set, it is resolved relative to the {es} + config directory. + {es} will automatically monitor this file for changes and will reload the configuration whenever + it is updated. Your OpenID Connect Provider should provide you with this file or a URL where it is available. + +op.userinfo_endpoint:: + (Optional) The URL for the UserInfo Endpoint in the OpenID Connect Provider. This is the endpoint of the OP that + can be queried to get further user information, if required. The value for this setting should be provided by your + OpenID Connect Provider. + +op.endsession_endpoint:: + (Optional) The URL to the End Session Endpoint in the OpenID Connect Provider. 
This is the endpoint where the user's + browser will be redirected after local logout, if the realm is configured for RP initiated Single Logout and + the OP supports it. The value for this setting should be provided by your OpenID Connect Provider. + +rp.post_logout_redirect_uri:: + (Optional) The Redirect URL where the OpenID Connect Provider should redirect the user after a + successful Single Logout (assuming `op.endsession_endpoint` above is also set). This should be set to a value that + will not trigger a new OpenID Connect Authentication, such as +$\{kibana-url}/logged_out+ where _$\{kibana-url}_ is + the base URL for your {kib} instance. + +claims.principal:: See <>. +claims.groups:: See <>. + +A final piece of configuration of the OpenID Connect realm is to set the `Client Secret` that was assigned +to the RP during registration in the OP. This is a secure setting and as such is not defined in the realm +configuration in `elasticsearch.yml` but added to the {ref}/secure-settings.html[elasticsearch keystore]. +For instance + + +[source,sh] +---- +bin/elasticsearch-keystore add xpack.security.authc.realms.oidc.oidc1.rp.client_secret +---- + + +NOTE: According to the OpenID Connect specification, the OP should also make their configuration +available at a well known URL, which is the concatenation of their `Issuer` value with the +`.well-known/openid-configuration` string. For example: `https://op.org.com/.well-known/openid-configuration` +That document should contain all the necessary information to configure the OpenID Connect realm in {es}. + + +[[oidc-claims-mapping]] +==== Claims mapping + +===== Claims and scopes + +When authenticating to {kib} using OpenID Connect, the OP will provide information about the user +in the form of OpenID Connect Claims, that can be included either in the ID Token, or be retrieved from the +UserInfo endpoint of the OP. The claim is defined as a piece of information asserted by the OP +for the authenticated user. Simply put, a claim is a name/value pair that contains information about +the user. Related to claims, we also have the notion of OpenID Connect Scopes. Scopes are identifiers +that are used to request access to specific lists of claims. The standard defines a set of scope +identifiers that can be requested. The only mandatory one is `openid`, while commonly used ones are +`profile` and `email`. The `profile` scope requests access to the `name`,`family_name`,`given_name`,`middle_name`,`nickname`, +`preferred_username`,`profile`,`picture`,`website`,`gender`,`birthdate`,`zoneinfo`,`locale`, and `updated_at` claims. +The `email` scope requests access to the `email` and `email_verified` claims. The process is that +the RP requests specific scopes during the authentication request. If the OP Privacy Policy +allows it and the authenticating user consents to it, the related claims are returned to the +RP (either in the ID Token or as a UserInfo response). + +The list of the supported claims will vary depending on the OP you are using, but you can expect +the https://openid.net/specs/openid-connect-core-1_0.html#StandardClaims[Standard Claims] to be +largely supported. + +[[oidc-claim-to-property]] +===== Mapping claims to user properties + +The goal of claims mapping is to configure {es} in such a way as to be able to map the values of +specified returned claims to one of the <> that are supported +by {es}. These user properties are then utilized to identify the user in the {kib} UI or the audit +logs, and can also be used to create <> rules. 
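+
+For example, a decoded ID Token payload is a simple JSON object of claim names
+and values. The values below, including the custom groups claim URI, are
+illustrative only and will differ depending on your OP:
+
+[source,js]
+--------------------------------------------------
+{
+  "iss": "https://op.example.org",
+  "sub": "jdoe",
+  "aud": "the_client_id",
+  "exp": 1559716800,
+  "iat": 1559713200,
+  "nonce": "WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM",
+  "http://example.info/claims/groups": [ "finance-team", "engineering" ]
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+With the example realm configuration shown earlier, the `sub` claim above would
+be mapped to the `principal` user property and the
+`http://example.info/claims/groups` claim to the `groups` user property.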
+ +The recommended steps for configuring OpenID Claims mapping are as follows: + +. Consult your OP configuration to see what claims it might support. Note that + the list provided in the OP's metadata or in the configuration page of the OP + is a list of potentially supported claims. However, for privacy reasons it might + not be a complete one, or not all supported claims will be available for all + authenticated users. + +. Read through the list of <> that {es} + supports, and decide which of them are useful to you, and can be provided by + your OP in the form of claims. At a _minimum_, the `principal` user property + is required. + +. Configure your OP to "release" those claims to your {stack} Relying + party. This process greatly varies by provider. You can use a static + configuration while others will support that the RP requests the scopes that + correspond to the claims to be "released" on authentication time. See + {ref}/security-settings.html#ref-oidc-settings[`rp.requested_scopes`] for details about how + to configure the scopes to request. To ensure interoperability and minimize + the errors, you should only request scopes that the OP supports, and which you + intend to map to {es} user properties. + +. Configure the OpenID Connect realm in {es} to associate the {es} user properties (see + <> below), to the name of the claims that your + OP will release. In the example above, we have configured the `principal` and + `groups` user properties as follows: + + .. `claims.principal: sub` : This instructs {es} to look for the OpenID Connect claim named `sub` + in the ID Token that the OP issued for the user ( or in the UserInfo response ) and assign the + value of this claim to the `principal` user property. `sub` is a commonly used claim for the + principal property as it is an identifier of the user in the OP and it is also a required + claim of the ID Token, thus offering guarantees that it will be available. It is, however, + only used as an example here, the OP may provide another claim that is a better fit for your needs. + + .. `claims.groups: "http://example.info/claims/groups"` : Similarly, this instructs {es} to look + for the claim with the name `http://example.info/claims/groups` (note that this is a URI - an + identifier, treated as a string and not a URL pointing to a location that will be retrieved) + either in the ID Token or in the UserInfo response, and map the value(s) of it to the user + property `groups` in {es}. There is no standard claim in the specification that is used for + expressing roles or group memberships of the authenticated user in the OP, so the name of the + claim that should be mapped here, will vary greatly between providers. Consult your OP + documentation for more details. + +[[oidc-user-properties]] +===== {es} user properties + +The {es} OpenID Connect realm can be configured to map OpenID Connect claims to the +following properties on the authenticated user: + +principal:: _(Required)_ + This is the _username_ that will be applied to a user that authenticates + against this realm. + The `principal` appears in places such as the {es} audit logs. + +NOTE: If the principal property fails to be mapped from a claim, the authentication fails. + +groups:: _(Recommended)_ + If you wish to use your OP's concept of groups or roles as the basis for a + user's {es} privileges, you should map them with this property. + The `groups` are passed directly to your <>. + +name:: _(Optional)_ The user's full name. +mail:: _(Optional)_ The user's email address. 
+dn:: _(Optional)_ The user's X.500 _Distinguished Name_. + + +===== Extracting partial values from OpenID Connect claims + +There are some occasions where the value of a claim may contain more information +than you wish to use within {es}. A common example of this is one where the +OP works exclusively with email addresses, but you would like the user's +`principal` to use the _local-name_ part of the email address. +For example if their email address was `james.wong@staff.example.com`, then you +would like their principal to simply be `james.wong`. + +This can be achieved using the `claim_patterns` setting in the {es} +realm, as demonstrated in the realm configuration below: + +[source, yaml] +------------------------------------------------------------------------------------- +xpack.security.authc.realms.oidc.oidc1: + order: 2 + rp.client_id: "the_client_id" + rp.response_type: code + rp.redirect_uri: "https://kibana.example.org:5601/api/security/v1/oidc" + op.authorization_endpoint: "https://op.example.org/oauth2/v1/authorize" + op.token_endpoint: "https://op.example.org/oauth2/v1/token" + op.userinfo_endpoint: "https://op.example.org/oauth2/v1/userinfo" + op.endsession_endpoint: "https://op.example.org/oauth2/v1/logout" + op.issuer: "https://op.example.org" + op.jwkset_path: oidc/jwkset.json + claims.principal: email_verified + claim_patterns.principal: "^([^@]+)@staff\\.example\\.com$" +------------------------------------------------------------------------------------- + +In this case, the user's `principal` is mapped from the `email_verified` claim, but a +regular expression is applied to the value before it is assigned to the user. +If the regular expression matches, then the result of the first group is used as the +effective value. If the regular expression does not match then the claim +mapping fails. + +In this example, the email address must belong to the `staff.example.com` domain, +and then the local-part (anything before the `@`) is used as the principal. +Any users who try to login using a different email domain will fail because the +regular expression will not match against their email address, and thus their +principal user property - which is mandatory - will not be populated. + +IMPORTANT: Small mistakes in these regular expressions can have significant +security consequences. For example, if we accidentally left off the trailing +`$` from the example above, then we would match any email address where the +domain starts with `staff.example.com`, and this would accept an email +address such as `admin@staff.example.com.attacker.net`. It is important that +you make sure your regular expressions are as precise as possible so that +you do not inadvertently open an avenue for user impersonation attacks. + +[[third-party-login]] +==== Third party initiated single sign-on + +The Open ID Connect realm in {es} supports 3rd party initiated login as described in the +https://openid.net/specs/openid-connect-core-1_0.html#ThirdPartyInitiatedLogin[relevant specification]. + +This allows the OP itself or another, third party other than the RP, to initiate the authentication +process while requesting the OP to be used for the authentication. Please note that the Elastic +Stack RP should already be configured for this OP, in order for this process to succeed. 
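+
+For reference, a third party (or the OP itself) can start the flow by calling
+the OpenID Connect prepare authentication API with the `iss` and, optionally,
+`login_hint` parameters, as in the example below. The issuer value is used to
+match the appropriate OpenID Connect realm, and the `login_hint` value shown
+here is an opaque, illustrative string that is passed through to the OP:
+
+[source,js]
+--------------------------------------------------
+POST /_security/oidc/prepare
+{
+  "iss" : "https://op.example.org",
+  "login_hint": "this_is_an_opaque_string"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[skip:These are properly tested in the OpenIDConnectIT suite]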
+
+
+[[oidc-logout]]
+==== OpenID Connect Logout
+
+The OpenID Connect realm in {es} supports RP-initiated logout functionality as
+described in the
+https://openid.net/specs/openid-connect-session-1_0.html#RPLogout[relevant part of the specification].
+
+In this process, the OpenID Connect RP (the Elastic Stack in this case) will redirect the user's
+browser to a predefined URL of the OP after successfully completing a local logout. The OP can then
+also log out the user, depending on its configuration, and should finally redirect the user back to the
+RP. The `op.endsession_endpoint` in the realm configuration determines the URL in the OP that the browser
+will be redirected to. The `rp.post_logout_redirect_uri` setting determines the URL to redirect
+the user back to after the OP logs them out.
+
+When configuring `rp.post_logout_redirect_uri`, care should be taken not to point this to a URL that
+will trigger re-authentication of the user. For instance, when using OpenID Connect to support
+single sign-on to {kib}, this could be set to +$\{kibana-url}/logged_out+, which will show a user-
+friendly message to the user.
+
+[[oidc-ssl-config]]
+==== OpenID Connect Realm SSL Configuration
+
+OpenID Connect depends on TLS to provide security properties such as encryption in transit and endpoint authentication. The RP
+is required to establish back-channel communication with the OP in order to exchange the code for an ID Token during the
+Authorization code grant flow and in order to get additional user information from the UserInfo endpoint. Furthermore, if
+you configure `op.jwkset_path` as a URL, {es} will need to get the OP's signing keys from the file hosted there. As such, it is
+important that {es} can validate and trust the server certificate that the OP uses for TLS. Since the system truststore is
+used for the client context of outgoing https connections, if your OP is using a certificate from a trusted CA, no additional
+configuration is needed.
+
+However, if the issuer of your OP's certificate is not trusted by the JVM on which {es} is running (e.g. it uses an organizational CA), then you must configure
+{es} to trust that CA. Assuming that you have the CA certificate that has signed the certificate that the OP uses for TLS
+stored in the `/oidc/company-ca.pem` file in the configuration directory of {es}, you need to set the following
+property in the realm configuration:
+
+[source, yaml]
+-------------------------------------------------------------------------------------
+xpack.security.authc.realms.oidc.oidc1:
+  order: 1
+  ...
+  ssl.certificate_authorities: ["/oidc/company-ca.pem"]
+-------------------------------------------------------------------------------------
+
+[[oidc-role-mapping]]
+=== Configuring role mappings
+
+When a user authenticates using OpenID Connect, they are identified to the Elastic Stack,
+but this does not automatically grant them access to perform any actions or
+access any data.
+
+Your OpenID Connect users cannot do anything until they are assigned roles. This can be done
+through either the
+{ref}/security-api-put-role-mapping.html[add role mapping API] or with
+<>.
+
+NOTE: You cannot use {stack-ov}/mapping-roles.html#mapping-roles-file[role mapping files]
+to grant roles to users authenticating via OpenID Connect.
+
+This is an example of a simple role mapping that grants the `kibana_user` role
+to any user who authenticates against the `oidc1` OpenID Connect realm:
+
+[source,js]
+--------------------------------------------------
+PUT /_security/role_mapping/oidc-kibana
+{
+  "roles": [ "kibana_user" ],
+  "enabled": true,
+  "rules": {
+    "field": { "realm.name": "oidc1" }
+  }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST
+
+
+The user properties that are mapped via the realm configuration are used to process
+role mapping rules, and these rules determine which roles a user is granted.
+
+The user fields that are provided to the role
+mapping are derived from the OpenID Connect claims as follows:
+
+- `username`: The `principal` user property
+- `dn`: The `dn` user property
+- `groups`: The `groups` user property
+- `metadata`: See <>
+
+For more information, see <> and
+{ref}/security-api.html#security-role-mapping-apis[role mapping APIs].
+
+If your OP has the ability to provide groups or roles to RPs via the use of
+an OpenID Connect claim, then you should map this claim using the `claims.groups` setting in
+the {es} realm (see <>), and then make use of it in a role mapping
+as in the example below.
+
+This mapping grants the {es} `finance_data` role to any users who authenticate
+via the `oidc1` realm with the `finance-team` group membership.
+
+[source,js]
+--------------------------------------------------
+PUT /_security/role_mapping/oidc-finance
+{
+  "roles": [ "finance_data" ],
+  "enabled": true,
+  "rules": { "all": [
+        { "field": { "realm.name": "oidc1" } },
+        { "field": { "groups": "finance-team" } }
+  ] }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST
+
+If your users also exist in a repository that can be directly accessed by {es}
+(such as an LDAP directory), then you can use
+<> instead of role mappings.
+
+In this case, you perform the following steps:
+
+1. In your OpenID Connect realm, assign a claim to act as the lookup user ID
+   by configuring the `claims.principal` setting.
+2. Create a new realm that can look up users from your local repository (e.g. an
+   `ldap` realm).
+3. In your OpenID Connect realm, set `authorization_realms` to the name of the realm you
+   created in step 2.
+
+[[oidc-user-metadata]]
+=== User metadata
+
+By default, users who authenticate via OpenID Connect will have some additional metadata
+fields. These fields will include every OpenID Connect claim that is provided in the authentication response
+(regardless of whether it is mapped to an {es} user property). Each of these fields is named
+`oidc(claim_name)`, where "claim_name" is the name of the
+claim as it was contained in the ID Token or in the UserInfo response. Note that these will
+include all the https://openid.net/specs/openid-connect-core-1_0.html#IDToken[ID Token claims]
+that pertain to the authentication event, rather than to the user themselves.
+
+This behaviour can be disabled by adding `populate_user_metadata: false` as
+a setting in the oidc realm.
+
+[[oidc-kibana]]
+=== Configuring {kib}
+
+OpenID Connect authentication in {kib} requires a few additional settings
+on top of the standard {kib} security configuration. The
+{kibana-ref}/using-kibana-with-security.html[{kib} security documentation]
+provides details on the available configuration options that you can apply.
+
+In particular, since your {es} nodes have been configured to use TLS on the HTTP
+interface, you must configure {kib} to use a `https` URL to connect to {es}, and
+you may need to configure `elasticsearch.ssl.certificateAuthorities` to trust
+the certificates that {es} has been configured to use.
+
+OpenID Connect authentication in {kib} is also subject to the
+`xpack.security.sessionTimeout` setting that is described in the {kib} security
+documentation, and you may wish to adjust this timeout to meet your local needs.
+
+The three additional settings that are required for OpenID Connect support are shown below:
+
+[source, yaml]
+------------------------------------------------------------
+xpack.security.authProviders: [oidc]
+xpack.security.auth.oidc.realm: "oidc1"
+server.xsrf.whitelist: [/api/security/v1/oidc]
+------------------------------------------------------------
+
+The configuration values used in the example above are:
+
+`xpack.security.authProviders`::
+Set this to `[ oidc ]` to instruct {kib} to use OpenID Connect single sign-on as the
+authentication method. This instructs {kib} to attempt to initiate an SSO flow
+every time a user attempts to access a URL in {kib}, if the user is not already
+authenticated. If you also want to allow users to log in with a username and password,
+you must enable the `basic` authProvider too. For example:
+
+[source, yaml]
+------------------------------------------------------------
+xpack.security.authProviders: [oidc, basic]
+------------------------------------------------------------
+
+This will allow users that haven't already authenticated with OpenID Connect to
+navigate directly to the `/login` page in {kib} in order to use the login form.
+
+`xpack.security.auth.oidc.realm`::
+The name of the OpenID Connect realm in {es} that should handle authentication
+for this {kib} instance.
+
+`server.xsrf.whitelist`::
+{kib} has in-built protection against _Cross Site Request Forgery_ attacks, which
+is designed to prevent the {kib} server from processing requests that
+originated from outside the {kib} application.
+In order to support OpenID Connect messages that originate from your
+OP or a third party (see <>), we need to explicitly _whitelist_ the
+OpenID Connect authentication endpoint within {kib}, so that the {kib} server will
+not reject these external messages.
+
+
+=== OpenID Connect without {kib}
+
+The OpenID Connect realm is designed to allow users to authenticate to {kib} and,
+as such, most parts of the guide above assume that {kib} is used.
+This section describes how a custom web application could use the relevant OpenID
+Connect REST APIs in order to authenticate users to {es} with OpenID Connect.
+
+Single sign-on realms such as OpenID Connect and SAML make use of the Token Service in
+{es} and in principle exchange a SAML or OpenID Connect Authentication response for
+an {es} access token and a refresh token. The access token is used as credentials for subsequent calls to {es}. The
+refresh token enables the user to get new {es} access tokens after the current one
+expires.
+
+NOTE: The {es} Token Service can be seen as a minimal OAuth2 authorization server
+and the access token and refresh token mentioned above are tokens that pertain
+_only_ to this authorization server. They are generated and consumed _only_ by {es}
+and are in no way related to the tokens (access token and ID Token) that the
+OpenID Connect Provider issues.
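+
+For example, a minimal sketch of refreshing an expired access token through the
+{ref}/security-api-get-token.html[get token API] (the refresh token value shown here is
+illustrative only):
+
+[source,js]
+--------------------------------------------------
+POST /_security/oauth2/token
+{
+  "grant_type"    : "refresh_token",
+  "refresh_token" : "vLBPvmAB6KvwvJZr27cS"
+}
+--------------------------------------------------
+// NOTCONSOLE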
+
+==== Register the RP with an OpenID Connect Provider
+
+The Relying Party ({es} and the custom web app) will need to be registered as
+a client with the OpenID Connect Provider. Note that when registering the
+`Redirect URI`, it needs to be a URL in the custom web app.
+
+==== OpenID Connect Realm
+
+An OpenID Connect realm needs to be created and configured accordingly
+in {es}. See <>.
+
+==== Service Account user for accessing the APIs
+
+The realm is designed with the assumption that there needs to be a privileged entity
+acting as an authentication proxy. In this case, the custom web application is the
+authentication proxy handling the authentication of end users (more correctly,
+"delegating" the authentication to the OpenID Connect Provider). The OpenID Connect
+APIs require authentication and the necessary authorization level for the authenticated
+user. For this reason, a Service Account user needs to be created and assigned a role
+that gives them the `manage_oidc` cluster privilege. The use of the `manage_token`
+cluster privilege will be necessary after the authentication takes place, so that
+the user can maintain access or be subsequently logged out.
+
+[source,js]
+--------------------------------------------------
+POST /_security/role/facilitator-role
+{
+  "cluster" : ["manage_oidc", "manage_token"]
+}
+--------------------------------------------------
+// CONSOLE
+
+
+[source,js]
+--------------------------------------------------
+POST /_security/user/facilitator
+{
+  "password" : "",
+  "roles" : [ "facilitator-role"]
+}
+--------------------------------------------------
+// CONSOLE
+
+
+==== Handling the authentication flow
+
+At a high level, the custom web application would need to perform the following steps in order to
+authenticate a user with OpenID Connect:
+
+. Make an HTTP POST request to `_security/oidc/prepare`, authenticating as the `facilitator` user, using the name of the
+OpenID Connect realm from the {es} configuration in the request body. See the
+{ref}/security-api-oidc-prepare-authentication.html[OIDC Prepare Authentication API] for more details.
++
+[source,js]
+--------------------------------------------------
+POST /_security/oidc/prepare
+{
+  "realm" : "oidc1"
+}
+--------------------------------------------------
+// CONSOLE
++
+. Handle the response to `/_security/oidc/prepare`. The response from {es} will contain three parameters:
+  `redirect`, `state`, `nonce`. The custom web application would need to store the values for `state`
+  and `nonce` in the user's session (client side in a cookie or server side if session information is
+  persisted this way) and redirect the user's browser to the URL contained in the
+  `redirect` value.
+. Handle a subsequent response from the OP. After the user is successfully authenticated with the
+  OpenID Connect Provider, they will be redirected back to the callback/redirect URI. Upon receiving
+  this HTTP GET request, the custom web app will need to make an HTTP POST request to
+  `_security/oidc/authenticate`, again authenticating as the `facilitator` user, passing as a parameter
+  the URL to which the user's browser was redirected, along with the
+  values for `nonce` and `state` it had saved in the user's session previously.
+ See {ref}/security-api-oidc-authenticate.html[OIDC Authenticate API] for more details ++ +[source,js] +----------------------------------------------------------------------- +POST /_security/oidc/authenticate +{ + "redirect_uri" : "https://oidc-kibana.elastic.co:5603/api/security/v1/oidc?code=jtI3Ntt8v3_XvcLzCFGq&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I", + "state" : "4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I", + "nonce" : "WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM" +} +----------------------------------------------------------------------- +// CONSOLE +// TEST[catch:unauthorized] ++ +Elasticsearch will validate this and if all is correct will respond with an access token that can be used + as a `Bearer` token for subsequent requests and a refresh token that can be later used to refresh the given + access token as described in {ref}/security-api-get-token.html[get token API]. +. At some point, if necessary, the custom web application can log the user out by using the + {ref}/security-api-oidc-logout.html[OIDC Logout API] passing the access token and refresh token as parameters. For example: ++ +[source,js] +-------------------------------------------------- +POST /_security/oidc/logout +{ + "token" : "dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==", + "refresh_token": "vLBPvmAB6KvwvJZr27cS" +} +-------------------------------------------------- +// CONSOLE +// TEST[catch:unauthorized] ++ +If the realm is configured accordingly, this may result in a response with a `redirect` parameter indicating where +the user needs to be redirected in the OP in order to complete the logout process. From 32eae0dfe93179502884fb9d07811e67ce5038a1 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 4 Jun 2019 07:19:58 -0500 Subject: [PATCH 052/210] [ML] [Data Frame] Adding supported aggs in docs (#42728) (#42842) * [ML] [Data Frame] Adding supported aggs in docs * [DOCS] Moves pivot to definitions list --- .../data-frames/apis/pivotresource.asciidoc | 26 +++++++++++++++++++ .../data-frames/apis/put-transform.asciidoc | 2 +- docs/reference/rest-api/defs.asciidoc | 2 ++ 3 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 docs/reference/data-frames/apis/pivotresource.asciidoc diff --git a/docs/reference/data-frames/apis/pivotresource.asciidoc b/docs/reference/data-frames/apis/pivotresource.asciidoc new file mode 100644 index 00000000000..ae8f5572850 --- /dev/null +++ b/docs/reference/data-frames/apis/pivotresource.asciidoc @@ -0,0 +1,26 @@ +[role="xpack"] +[testenv="basic"] +[[data-frame-transform-pivot]] +=== Pivot resources + +A pivot configuration object has the following properties: + +`group_by` (required):: (object) Defines how to group the data. More than one grouping can be defined per pivot. The following groupings are supported: +* {ref}/search-aggregations-bucket-composite-aggregation.html#_terms[Terms] +* {ref}/search-aggregations-bucket-composite-aggregation.html#_histogram[Histogram] +* {ref}/search-aggregations-bucket-composite-aggregation.html#_date_histogram[Date Histogram] + +`aggregations` (required):: (object) Defines how to aggregate the grouped data. 
+The following aggregations are supported: +* {ref}/search-aggregations-metrics-avg-aggregation.html[Average] +* {ref}/search-aggregations-metrics-weight-avg-aggregation.html[Weighted Average] +* {ref}/search-aggregations-metrics-cardinality-aggregation.html[Cardinality] +* {ref}/search-aggregations-metrics-geocentroid-aggregation.html[Geo Centroid] +* {ref}/search-aggregations-metrics-max-aggregation.html[Max] +* {ref}/search-aggregations-metrics-min-aggregation.html[Min] +* {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Scripted Metric] +* {ref}/search-aggregations-metrics-sum-aggregation.html[Sum] +* {ref}/search-aggregations-metrics-valuecount-aggregation.html[Value Count] +* {ref}/search-aggregations-metrics-bucket_script-aggregation.html[Bucket Script] + +//For more information, see {stack-ov}/ml-dataframes.html[dataframes-cap}]. \ No newline at end of file diff --git a/docs/reference/data-frames/apis/put-transform.asciidoc b/docs/reference/data-frames/apis/put-transform.asciidoc index f452c38ab4c..fcc86fa3237 100644 --- a/docs/reference/data-frames/apis/put-transform.asciidoc +++ b/docs/reference/data-frames/apis/put-transform.asciidoc @@ -39,7 +39,7 @@ a `query`. `dest` (required):: (object) The destination configuration, consisting of `index`. `pivot`:: (object) Defines the pivot function `group by` fields and the aggregation to -reduce the data. +reduce the data. See <>. `description`:: Optional free text description of the data frame transform diff --git a/docs/reference/rest-api/defs.asciidoc b/docs/reference/rest-api/defs.asciidoc index 823b63cbe57..65a0384a3d3 100644 --- a/docs/reference/rest-api/defs.asciidoc +++ b/docs/reference/rest-api/defs.asciidoc @@ -12,6 +12,7 @@ These resource definitions are used in APIs related to {ml-features} and * <> * <> * <> +* <> * <> * <> * <> @@ -22,6 +23,7 @@ include::{es-repo-dir}/ml/apis/filterresource.asciidoc[] include::{es-repo-dir}/ml/apis/jobresource.asciidoc[] include::{es-repo-dir}/ml/apis/jobcounts.asciidoc[] include::{es-repo-dir}/ml/apis/snapshotresource.asciidoc[] +include::{es-repo-dir}/data-frames/apis/pivotresource.asciidoc[] include::{xes-repo-dir}/rest-api/security/role-mapping-resources.asciidoc[] include::{es-repo-dir}/ml/apis/resultsresource.asciidoc[] include::{es-repo-dir}/ml/apis/eventresource.asciidoc[] From d9c582e66bdecb9acc74a06cd49645dcabdc37cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 4 Jun 2019 14:37:55 +0200 Subject: [PATCH 053/210] [Docs] Add to preference parameter docs (#42797) Adding notes to the existing docs about how using `preference` might increase request cache utilization but also add warning about the downsides. Closes #24278 --- docs/reference/search/request/preference.asciidoc | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc index 5f3fcb2efa6..7412f04844c 100644 --- a/docs/reference/search/request/preference.asciidoc +++ b/docs/reference/search/request/preference.asciidoc @@ -6,8 +6,12 @@ default, Elasticsearch selects from the available shard copies in an unspecified order, taking the <> and <> configuration into account. However, it may sometimes be desirable to try and route certain -searches to certain sets of shard copies, for instance to make better use of -per-copy caches. +searches to certain sets of shard copies. + +A possible use case would be to make use of per-copy caches like the +<>. 
Doing this, however, runs contrary to the +idea of search parallelization and can create hotspots on certain nodes because +the load might not be evenly distributed anymore. The `preference` is a query string parameter which can be set to: @@ -64,6 +68,10 @@ GET /_search?preference=xyzabc123 ------------------------------------------------ // CONSOLE +This can be an effective strategy to increase usage of e.g. the request cache for +unique users running similar searches repeatedly by always hitting the same cache, while +requests of different users are still spread across all shard copies. + NOTE: The `_only_local` preference guarantees only to use shard copies on the local node, which is sometimes useful for troubleshooting. All other options do not _fully_ guarantee that any particular shard copies are used in a search, From d050c52fd1b6b055fb588ef2531bc689523426f2 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 4 Jun 2019 08:42:43 -0400 Subject: [PATCH 054/210] [DOCS] Fix broken bucket script agg link --- docs/reference/data-frames/apis/pivotresource.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/data-frames/apis/pivotresource.asciidoc b/docs/reference/data-frames/apis/pivotresource.asciidoc index ae8f5572850..765fa7960b7 100644 --- a/docs/reference/data-frames/apis/pivotresource.asciidoc +++ b/docs/reference/data-frames/apis/pivotresource.asciidoc @@ -21,6 +21,6 @@ The following aggregations are supported: * {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Scripted Metric] * {ref}/search-aggregations-metrics-sum-aggregation.html[Sum] * {ref}/search-aggregations-metrics-valuecount-aggregation.html[Value Count] -* {ref}/search-aggregations-metrics-bucket_script-aggregation.html[Bucket Script] +* {ref}/search-aggregations-pipeline-bucket_script-aggregation.html[Bucket Script] //For more information, see {stack-ov}/ml-dataframes.html[dataframes-cap}]. \ No newline at end of file From 783159dcbc5a1dae5960b7f25ea1f05455b354a2 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 4 Jun 2019 09:39:08 -0400 Subject: [PATCH 055/210] [DOCS] Fix typo in bucket script aggregation link --- docs/reference/data-frames/apis/pivotresource.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/data-frames/apis/pivotresource.asciidoc b/docs/reference/data-frames/apis/pivotresource.asciidoc index 765fa7960b7..64c49af03ea 100644 --- a/docs/reference/data-frames/apis/pivotresource.asciidoc +++ b/docs/reference/data-frames/apis/pivotresource.asciidoc @@ -21,6 +21,6 @@ The following aggregations are supported: * {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Scripted Metric] * {ref}/search-aggregations-metrics-sum-aggregation.html[Sum] * {ref}/search-aggregations-metrics-valuecount-aggregation.html[Value Count] -* {ref}/search-aggregations-pipeline-bucket_script-aggregation.html[Bucket Script] +* {ref}/search-aggregations-pipeline-bucket-script-aggregation.html[Bucket Script] //For more information, see {stack-ov}/ml-dataframes.html[dataframes-cap}]. \ No newline at end of file From df124f32db8aba051e1b3d04e7150e10a1e3616c Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 4 Jun 2019 14:32:43 +0100 Subject: [PATCH 056/210] Refactor control flow in TransportAnalyzeAction (#42801) The control flow in TransportAnalyzeAction is currently spread across two large methods, and is quite difficult to follow. 
This commit tidies things up a bit, to make it clearer when we use pre-defined analyzers and when we use custom built ones. --- .../admin/indices/analyze/AnalyzeAction.java | 12 ++ .../analyze/TransportAnalyzeAction.java | 202 +++++++++--------- .../indices/TransportAnalyzeActionTests.java | 82 ++++--- .../indices/analyze/AnalyzeRequestTests.java | 32 ++- 4 files changed, 191 insertions(+), 137 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index 65c54ce70d4..6dfa4bf4c44 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -282,6 +282,18 @@ public class AnalyzeAction extends Action { validationException = addValidationError("tokenizer/analyze should be null if normalizer is specified", validationException); } + if (analyzer != null && (tokenizer != null || charFilters.isEmpty() == false || tokenFilters.isEmpty() == false)) { + validationException + = addValidationError("cannot define extra components on a named analyzer", validationException); + } + if (normalizer != null && (tokenizer != null || charFilters.isEmpty() == false || tokenFilters.isEmpty() == false)) { + validationException + = addValidationError("cannot define extra components on a named normalizer", validationException); + } + if (field != null && (tokenizer != null || charFilters.isEmpty() == false || tokenFilters.isEmpty() == false)) { + validationException + = addValidationError("cannot define extra components on a field-specific analyzer", validationException); + } return validationException; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index abee1b07505..b6079cc9c69 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -124,72 +124,92 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokenizerFactory = parseTokenizerFactory(request, indexAnalyzers, - analysisRegistry, environment); + analysisRegistry, environment); List charFilterFactoryList = parseCharFilterFactories(request, indexSettings, analysisRegistry, environment, false); @@ -197,18 +217,11 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokenFilterFactoryList = parseTokenFilterFactories(request, indexSettings, analysisRegistry, environment, tokenizerFactory, charFilterFactoryList, false); - analyzer = new CustomAnalyzer(tokenizerFactory.v1(), tokenizerFactory.v2(), - charFilterFactoryList.toArray(new CharFilterFactory[charFilterFactoryList.size()]), - tokenFilterFactoryList.toArray(new TokenFilterFactory[tokenFilterFactoryList.size()])); - closeAnalyzer = true; - } else if (request.normalizer() != null) { - // Get normalizer from indexAnalyzers - analyzer = indexAnalyzers.getNormalizer(request.normalizer()); - if (analyzer == null) { - throw new IllegalArgumentException("failed to find normalizer under [" + request.normalizer() + "]"); - } + return new CustomAnalyzer(tokenizerFactory.v1(), tokenizerFactory.v2(), + charFilterFactoryList.toArray(new CharFilterFactory[0]), + 
tokenFilterFactoryList.toArray(new TokenFilterFactory[0])); } else if (((request.tokenFilters() != null && request.tokenFilters().size() > 0) - || (request.charFilters() != null && request.charFilters().size() > 0))) { + || (request.charFilters() != null && request.charFilters().size() > 0))) { final IndexSettings indexSettings = indexAnalyzers == null ? null : indexAnalyzers.getIndexSettings(); // custom normalizer = if normalizer == null but filter or char_filter is not null and tokenizer/analyzer is null // get charfilter and filter from request @@ -222,46 +235,29 @@ public class TransportAnalyzeAction extends TransportSingleShardAction(keywordTokenizerName, keywordTokenizerFactory), charFilterFactoryList, true); - analyzer = new CustomAnalyzer("keyword_for_normalizer", - keywordTokenizerFactory, - charFilterFactoryList.toArray(new CharFilterFactory[charFilterFactoryList.size()]), - tokenFilterFactoryList.toArray(new TokenFilterFactory[tokenFilterFactoryList.size()])); - closeAnalyzer = true; - } else if (analyzer == null) { - if (indexAnalyzers == null) { - analyzer = analysisRegistry.getAnalyzer("standard"); - } else { - analyzer = indexAnalyzers.getDefaultIndexAnalyzer(); - } - } - if (analyzer == null) { - throw new IllegalArgumentException("failed to find analyzer"); + return new CustomAnalyzer("keyword_for_normalizer", keywordTokenizerFactory, + charFilterFactoryList.toArray(new CharFilterFactory[0]), tokenFilterFactoryList.toArray(new TokenFilterFactory[0])); } + return null; + } - List tokens = null; - AnalyzeAction.DetailAnalyzeResponse detail = null; - + private static AnalyzeAction.Response analyze(AnalyzeAction.Request request, Analyzer analyzer, int maxTokenCount) { if (request.explain()) { - detail = detailAnalyze(request, analyzer, field, maxTokenCount); - } else { - tokens = simpleAnalyze(request, analyzer, field, maxTokenCount); + return new AnalyzeAction.Response(null, detailAnalyze(request, analyzer, maxTokenCount)); } - - if (closeAnalyzer) { - analyzer.close(); - } - - return new AnalyzeAction.Response(tokens, detail); + return new AnalyzeAction.Response(simpleAnalyze(request, analyzer, maxTokenCount), null); } private static List simpleAnalyze(AnalyzeAction.Request request, - Analyzer analyzer, String field, int maxTokenCount) { + Analyzer analyzer, int maxTokenCount) { TokenCounter tc = new TokenCounter(maxTokenCount); List tokens = new ArrayList<>(); int lastPosition = -1; int lastOffset = 0; + // Note that we always pass "" as the field to the various Analyzer methods, because + // the analyzers we use here are all field-specific and so ignore this parameter for (String text : request.text()) { - try (TokenStream stream = analyzer.tokenStream(field, text)) { + try (TokenStream stream = analyzer.tokenStream("", text)) { stream.reset(); CharTermAttribute term = stream.addAttribute(CharTermAttribute.class); PositionIncrementAttribute posIncr = stream.addAttribute(PositionIncrementAttribute.class); @@ -282,8 +278,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction includeAttributes = new HashSet<>(); if (request.attributes() != null) { @@ -338,7 +334,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction includeAttributes) { + private void analyze(TokenStream stream, Analyzer analyzer, Set includeAttributes) { try { stream.reset(); CharTermAttribute term = stream.addAttribute(CharTermAttribute.class); @@ -477,8 +473,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokens = analyze.getTokens(); 
assertEquals(4, tokens.size()); @@ -147,8 +158,8 @@ public class TransportAnalyzeActionTests extends ESTestCase { request.text("the qu1ck brown fox"); request.tokenizer("standard"); request.addTokenFilter("mock"); - analyze = TransportAnalyzeAction.analyze(request, "text", null, randomBoolean() ? indexAnalyzers : null, registry, environment, - maxTokenCount); + analyze + = TransportAnalyzeAction.analyze(request, registry, environment, randomBoolean() ? mockIndexService() : null, maxTokenCount); tokens = analyze.getTokens(); assertEquals(3, tokens.size()); assertEquals("qu1ck", tokens.get(0).getTerm()); @@ -160,8 +171,8 @@ public class TransportAnalyzeActionTests extends ESTestCase { request.text("the qu1ck brown fox"); request.tokenizer("standard"); request.addCharFilter("append_foo"); - analyze = TransportAnalyzeAction.analyze(request, "text", null, randomBoolean() ? indexAnalyzers : null, registry, environment, - maxTokenCount); + analyze + = TransportAnalyzeAction.analyze(request, registry, environment, randomBoolean() ? mockIndexService() : null, maxTokenCount); tokens = analyze.getTokens(); assertEquals(4, tokens.size()); assertEquals("the", tokens.get(0).getTerm()); @@ -175,8 +186,8 @@ public class TransportAnalyzeActionTests extends ESTestCase { request.tokenizer("standard"); request.addCharFilter("append"); request.text("the qu1ck brown fox"); - analyze = TransportAnalyzeAction.analyze(request, "text", null, randomBoolean() ? indexAnalyzers : null, registry, environment, - maxTokenCount); + analyze + = TransportAnalyzeAction.analyze(request, registry, environment, randomBoolean() ? mockIndexService() : null, maxTokenCount); tokens = analyze.getTokens(); assertEquals(4, tokens.size()); assertEquals("the", tokens.get(0).getTerm()); @@ -189,7 +200,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { AnalyzeAction.Request request = new AnalyzeAction.Request(); request.analyzer("standard"); request.text("the 1 brown fox"); - AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, registry, environment, null, maxTokenCount); List tokens = analyze.getTokens(); assertEquals(4, tokens.size()); assertEquals("the", tokens.get(0).getTerm()); @@ -221,8 +232,8 @@ public class TransportAnalyzeActionTests extends ESTestCase { AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text("the quick brown fox"); request.analyzer("custom_analyzer"); - AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, - maxTokenCount); + AnalyzeAction.Response analyze + = TransportAnalyzeAction.analyze(request, registry, environment, mockIndexService(), maxTokenCount); List tokens = analyze.getTokens(); assertEquals(3, tokens.size()); assertEquals("quick", tokens.get(0).getTerm()); @@ -230,7 +241,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { assertEquals("fox", tokens.get(2).getTerm()); request.analyzer("standard"); - analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, maxTokenCount); + analyze = TransportAnalyzeAction.analyze(request, registry, environment, mockIndexService(), maxTokenCount); tokens = analyze.getTokens(); assertEquals(4, tokens.size()); assertEquals("the", tokens.get(0).getTerm()); @@ -241,7 +252,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { // Switch the 
analyzer out for just a tokenizer request.analyzer(null); request.tokenizer("standard"); - analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, maxTokenCount); + analyze = TransportAnalyzeAction.analyze(request, registry, environment, mockIndexService(), maxTokenCount); tokens = analyze.getTokens(); assertEquals(4, tokens.size()); assertEquals("the", tokens.get(0).getTerm()); @@ -251,7 +262,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { // Now try applying our token filter request.addTokenFilter("mock"); - analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, maxTokenCount); + analyze = TransportAnalyzeAction.analyze(request, registry, environment, mockIndexService(), maxTokenCount); tokens = analyze.getTokens(); assertEquals(3, tokens.size()); assertEquals("quick", tokens.get(0).getTerm()); @@ -259,24 +270,32 @@ public class TransportAnalyzeActionTests extends ESTestCase { assertEquals("fox", tokens.get(2).getTerm()); } - public void testGetIndexAnalyserWithoutIndexAnalyzers() throws IOException { + public void testGetIndexAnalyserWithoutIndexAnalyzers() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( new AnalyzeAction.Request() .analyzer("custom_analyzer") .text("the qu1ck brown fox-dog"), - "text", null, null, registry, environment, maxTokenCount)); + registry, environment, null, maxTokenCount)); assertEquals(e.getMessage(), "failed to find global analyzer [custom_analyzer]"); } - public void testUnknown() throws IOException { + public void testGetFieldAnalyzerWithoutIndexAnalyzers() { + AnalyzeAction.Request req = new AnalyzeAction.Request().field("field").text("text"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + TransportAnalyzeAction.analyze(req, registry, environment, null, maxTokenCount); + }); + assertEquals(e.getMessage(), "analysis based on a specific field requires an index"); + } + + public void testUnknown() { boolean notGlobal = randomBoolean(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( new AnalyzeAction.Request() .analyzer("foobar") .text("the qu1ck brown fox"), - "text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount)); + registry, environment, notGlobal ? mockIndexService() : null, maxTokenCount)); if (notGlobal) { assertEquals(e.getMessage(), "failed to find analyzer [foobar]"); } else { @@ -288,7 +307,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { new AnalyzeAction.Request() .tokenizer("foobar") .text("the qu1ck brown fox"), - "text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount)); + registry, environment, notGlobal ? mockIndexService() : null, maxTokenCount)); if (notGlobal) { assertEquals(e.getMessage(), "failed to find tokenizer under [foobar]"); } else { @@ -301,7 +320,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { .tokenizer("standard") .addTokenFilter("foobar") .text("the qu1ck brown fox"), - "text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount)); + registry, environment, notGlobal ? 
mockIndexService() : null, maxTokenCount)); if (notGlobal) { assertEquals(e.getMessage(), "failed to find token filter under [foobar]"); } else { @@ -315,7 +334,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { .addTokenFilter("lowercase") .addCharFilter("foobar") .text("the qu1ck brown fox"), - "text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount)); + registry, environment, notGlobal ? mockIndexService() : null, maxTokenCount)); if (notGlobal) { assertEquals(e.getMessage(), "failed to find char filter under [foobar]"); } else { @@ -327,7 +346,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { new AnalyzeAction.Request() .normalizer("foobar") .text("the qu1ck brown fox"), - "text", null, indexAnalyzers, registry, environment, maxTokenCount)); + registry, environment, mockIndexService(), maxTokenCount)); assertEquals(e.getMessage(), "failed to find normalizer under [foobar]"); } @@ -336,8 +355,8 @@ public class TransportAnalyzeActionTests extends ESTestCase { request.tokenizer("standard"); request.addTokenFilter("stop"); // stop token filter is not prebuilt in AnalysisModule#setupPreConfiguredTokenFilters() request.text("the quick brown fox"); - AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, - maxTokenCount); + AnalyzeAction.Response analyze + = TransportAnalyzeAction.analyze(request, registry, environment, mockIndexService(), maxTokenCount); List tokens = analyze.getTokens(); assertEquals(3, tokens.size()); assertEquals("quick", tokens.get(0).getTerm()); @@ -349,8 +368,8 @@ public class TransportAnalyzeActionTests extends ESTestCase { AnalyzeAction.Request request = new AnalyzeAction.Request("index"); request.normalizer("my_normalizer"); request.text("ABc"); - AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, - maxTokenCount); + AnalyzeAction.Response analyze + = TransportAnalyzeAction.analyze(request, registry, environment, mockIndexService(), maxTokenCount); List tokens = analyze.getTokens(); assertEquals(1, tokens.size()); @@ -361,7 +380,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { * This test is equivalent of calling _analyze without a specific index. * The default value for the maximum token count is used. */ - public void testExceedDefaultMaxTokenLimit() throws IOException{ + public void testExceedDefaultMaxTokenLimit() { // create a string with No. words more than maxTokenCount StringBuilder sbText = new StringBuilder(); for (int i = 0; i <= maxTokenCount; i++){ @@ -375,8 +394,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { request.text(text); request.analyzer("standard"); IllegalStateException e = expectThrows(IllegalStateException.class, - () -> TransportAnalyzeAction.analyze( - request, "text", null, null, registry, environment, maxTokenCount)); + () -> TransportAnalyzeAction.analyze(request, registry, environment, null, maxTokenCount)); assertEquals(e.getMessage(), "The number of tokens produced by calling _analyze has exceeded the allowed maximum of [" + maxTokenCount + "]." 
+ " This limit can be set by changing the [index.analyze.max_token_count] index level setting."); @@ -386,8 +404,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { request2.analyzer("standard"); request2.explain(true); IllegalStateException e2 = expectThrows(IllegalStateException.class, - () -> TransportAnalyzeAction.analyze( - request2, "text", null, null, registry, environment, maxTokenCount)); + () -> TransportAnalyzeAction.analyze(request2, registry, environment, null, maxTokenCount)); assertEquals(e2.getMessage(), "The number of tokens produced by calling _analyze has exceeded the allowed maximum of [" + maxTokenCount + "]." + " This limit can be set by changing the [index.analyze.max_token_count] index level setting."); } @@ -396,7 +413,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { * This test is equivalent of calling _analyze against a specific index. * The index specific value for the maximum token count is used. */ - public void testExceedSetMaxTokenLimit() throws IOException{ + public void testExceedSetMaxTokenLimit() { // create a string with No. words more than idxMaxTokenCount StringBuilder sbText = new StringBuilder(); for (int i = 0; i <= idxMaxTokenCount; i++){ @@ -409,8 +426,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { request.text(text); request.analyzer("standard"); IllegalStateException e = expectThrows(IllegalStateException.class, - () -> TransportAnalyzeAction.analyze( - request, "text", null, indexAnalyzers, registry, environment, idxMaxTokenCount)); + () -> TransportAnalyzeAction.analyze(request, registry, environment, null, idxMaxTokenCount)); assertEquals(e.getMessage(), "The number of tokens produced by calling _analyze has exceeded the allowed maximum of [" + idxMaxTokenCount + "]." 
+ " This limit can be set by changing the [index.analyze.max_token_count] index level setting."); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java index 017cf3a8385..2c4c9212fb5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java @@ -26,10 +26,11 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import static org.hamcrest.CoreMatchers.containsString; public class AnalyzeRequestTests extends ESTestCase { - public void testValidation() throws Exception { + public void testValidation() { AnalyzeAction.Request request = new AnalyzeAction.Request(); ActionRequestValidationException e = request.validate(); @@ -66,6 +67,35 @@ public class AnalyzeRequestTests extends ESTestCase { requestAnalyzer.analyzer("analyzer"); e = requestAnalyzer.validate(); assertTrue(e.getMessage().contains("tokenizer/analyze should be null if normalizer is specified")); + + { + AnalyzeAction.Request analyzerPlusDefs = new AnalyzeAction.Request("index"); + analyzerPlusDefs.text("text"); + analyzerPlusDefs.analyzer("analyzer"); + analyzerPlusDefs.addTokenFilter("tokenfilter"); + e = analyzerPlusDefs.validate(); + assertNotNull(e); + assertThat(e.getMessage(), containsString("cannot define extra components on a named analyzer")); + } + + { + AnalyzeAction.Request analyzerPlusDefs = new AnalyzeAction.Request("index"); + analyzerPlusDefs.text("text"); + analyzerPlusDefs.normalizer("normalizer"); + analyzerPlusDefs.addTokenFilter("tokenfilter"); + e = analyzerPlusDefs.validate(); + assertNotNull(e); + assertThat(e.getMessage(), containsString("cannot define extra components on a named normalizer")); + } + { + AnalyzeAction.Request analyzerPlusDefs = new AnalyzeAction.Request("index"); + analyzerPlusDefs.text("text"); + analyzerPlusDefs.field("field"); + analyzerPlusDefs.addTokenFilter("tokenfilter"); + e = analyzerPlusDefs.validate(); + assertNotNull(e); + assertThat(e.getMessage(), containsString("cannot define extra components on a field-specific analyzer")); + } } public void testSerialization() throws IOException { From 6391f90616cca5338f5a77b076df05297562de4d Mon Sep 17 00:00:00 2001 From: Andrey Ershov Date: Tue, 4 Jun 2019 17:22:33 +0200 Subject: [PATCH 057/210] Fix testNoMasterActionsWriteMasterBlock (#42798) This commit performs the proper restore of network disruption. Previously disruptionScheme.stopDisrupting() was called that does not ensure that connectivity between cluster nodes is restored. The test was checking that the cluster has green status, but it was not checking that connectivity between nodes is restored. Here we switch to internalCluster().clearDisruptionScheme(true) which performs both checks before returning. 
Closes #39688 (cherry picked from commit c8988d5cf5a85f9b28ce148dbf100aaa6682a757) --- .../test/java/org/elasticsearch/cluster/NoMasterNodeIT.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java index 78b1fef91f2..450aee482d7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -192,7 +192,6 @@ public class NoMasterNodeIT extends ESIntegTestCase { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/39688") public void testNoMasterActionsWriteMasterBlock() throws Exception { Settings settings = Settings.builder() .put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false) @@ -264,8 +263,6 @@ public class NoMasterNodeIT extends ESIntegTestCase { assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE)); } - disruptionScheme.stopDisrupting(); - - client().admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForNodes("3").get(); + internalCluster().clearDisruptionScheme(true); } } From 1514d1a1beec433aaf196bf00b158087023a959a Mon Sep 17 00:00:00 2001 From: Chris Cho Date: Wed, 5 Jun 2019 01:28:58 +0900 Subject: [PATCH 058/210] Change shard allocation filter property and api (#42602) The current example is not working and a bit confused. This change tries to match it with the sample of the watcher blog. --- x-pack/docs/en/watcher/how-watcher-works.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/docs/en/watcher/how-watcher-works.asciidoc b/x-pack/docs/en/watcher/how-watcher-works.asciidoc index 80aeb69a38d..5ecc5b41ec6 100644 --- a/x-pack/docs/en/watcher/how-watcher-works.asciidoc +++ b/x-pack/docs/en/watcher/how-watcher-works.asciidoc @@ -149,14 +149,14 @@ primary and all replicas of this particular shard will reload. Because the watches are executed on the node, where the watch shards are, you can create dedicated watcher nodes by using shard allocation filtering. 
-You could configure nodes with a dedicated `node.attr.watcher: true` property and +You could configure nodes with a dedicated `node.attr.role: watcher` property and then configure the `.watches` index like this: [source,js] ------------------------ PUT .watches/_settings { - "index.routing.allocation.include": "watcher" + "index.routing.allocation.include.role": "watcher" } ------------------------ // CONSOLE @@ -442,4 +442,4 @@ references the `email_notification_subject` template: } } ---------------------------------------------------------------------- -// NOTCONSOLE \ No newline at end of file +// NOTCONSOLE From e44b8b1e2ecc1aa66cee037197ac369a2bb94518 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 4 Jun 2019 13:50:23 -0700 Subject: [PATCH 059/210] [Backport] Remove dependency substitutions 7.x (#42866) * Remove unnecessary usage of Gradle dependency substitution rules (#42773) (cherry picked from commit 12d583dbf6f7d44f00aa365e34fc7e937c3c61f7) --- benchmarks/build.gradle | 2 +- build.gradle | 70 ++---------------- buildSrc/build.gradle | 5 ++ .../gradle/plugin/PluginBuildPlugin.groovy | 10 ++- .../gradle/precommit/PrecommitTasks.groovy | 38 +++++----- .../gradle/test/RestIntegTestTask.groovy | 2 +- .../test/StandaloneRestTestPlugin.groovy | 2 +- .../gradle/tool/ClasspathUtils.java | 23 ++++++ buildSrc/src/main/resources/buildSrc.marker | 0 client/benchmark/build.gradle | 6 +- client/rest-high-level/build.gradle | 24 +++---- client/rest/build.gradle | 4 +- client/sniffer/build.gradle | 6 +- client/test/build.gradle | 2 +- client/transport/build.gradle | 14 ++-- distribution/build.gradle | 2 +- distribution/tools/plugin-cli/build.gradle | 6 +- libs/build.gradle | 10 +-- libs/cli/build.gradle | 2 +- libs/core/build.gradle | 6 +- libs/dissect/build.gradle | 14 ++-- libs/geo/build.gradle | 6 +- libs/grok/build.gradle | 10 ++- libs/nio/build.gradle | 18 ++--- libs/secure-sm/build.gradle | 18 ++--- libs/ssl-config/build.gradle | 8 +-- libs/x-content/build.gradle | 18 ++--- modules/ingest-common/build.gradle | 4 +- modules/lang-painless/build.gradle | 2 +- modules/lang-painless/spi/build.gradle | 2 +- modules/reindex/build.gradle | 4 +- plugins/examples/build.gradle | 19 +++++ plugins/transport-nio/build.gradle | 2 +- qa/ccs-unavailable-clusters/build.gradle | 2 +- qa/multi-cluster-search/build.gradle | 2 +- qa/vagrant/build.gradle | 2 +- qa/wildfly/build.gradle | 2 +- server/build.gradle | 18 ++--- .../elasticsearch/bootstrap/security.policy | 2 +- settings.gradle | 72 +++++++++---------- test/framework/build.gradle | 10 +-- test/logger-usage/build.gradle | 2 +- x-pack/build.gradle | 14 ---- x-pack/docs/build.gradle | 1 - x-pack/license-tools/build.gradle | 6 +- x-pack/plugin/build.gradle | 4 +- x-pack/plugin/ccr/build.gradle | 2 +- x-pack/plugin/core/build.gradle | 4 +- x-pack/plugin/data-frame/build.gradle | 2 +- .../qa/multi-node-tests/build.gradle | 2 +- .../qa/single-node-tests/build.gradle | 2 +- x-pack/plugin/deprecation/build.gradle | 2 +- x-pack/plugin/graph/build.gradle | 1 - .../graph/qa/with-security/build.gradle | 2 +- x-pack/plugin/ilm/build.gradle | 1 - x-pack/plugin/logstash/build.gradle | 1 - x-pack/plugin/ml/build.gradle | 5 +- .../ml/qa/basic-multi-node/build.gradle | 2 +- x-pack/plugin/ml/qa/disabled/build.gradle | 2 +- .../ml/qa/ml-with-security/build.gradle | 1 - .../qa/native-multi-node-tests/build.gradle | 1 - .../ml/qa/no-bootstrap-tests/build.gradle | 2 +- .../ml/qa/single-node-tests/build.gradle | 2 +- x-pack/plugin/monitoring/build.gradle | 9 ++- 
x-pack/plugin/rollup/build.gradle | 5 +- x-pack/plugin/security/build.gradle | 1 - x-pack/plugin/security/cli/build.gradle | 5 +- .../qa/basic-enable-security/build.gradle | 1 - .../security/qa/security-basic/build.gradle | 1 - .../plugin/security/qa/tls-basic/build.gradle | 1 - x-pack/plugin/sql/build.gradle | 5 +- x-pack/plugin/sql/jdbc/build.gradle | 6 +- x-pack/plugin/sql/qa/build.gradle | 4 +- x-pack/plugin/sql/qa/security/build.gradle | 4 +- x-pack/plugin/sql/sql-action/build.gradle | 6 +- x-pack/plugin/sql/sql-cli/build.gradle | 4 +- x-pack/plugin/sql/sql-client/build.gradle | 2 +- x-pack/plugin/sql/sql-proto/build.gradle | 6 +- x-pack/plugin/watcher/build.gradle | 6 +- x-pack/qa/kerberos-tests/build.gradle | 2 +- x-pack/qa/oidc-op-tests/build.gradle | 1 - x-pack/qa/openldap-tests/build.gradle | 1 - .../reindex-tests-with-security/build.gradle | 1 - x-pack/qa/rolling-upgrade/build.gradle | 2 +- x-pack/qa/security-client-tests/build.gradle | 2 +- .../build.gradle | 2 +- x-pack/qa/security-migrate-tests/build.gradle | 2 +- .../build.gradle | 1 - x-pack/qa/smoke-test-plugins-ssl/build.gradle | 2 +- x-pack/qa/third-party/jira/build.gradle | 2 +- x-pack/qa/third-party/pagerduty/build.gradle | 2 +- x-pack/qa/third-party/slack/build.gradle | 2 +- x-pack/qa/transport-client-tests/build.gradle | 2 +- x-pack/test/feature-aware/build.gradle | 6 +- x-pack/transport-client/build.gradle | 4 +- 95 files changed, 277 insertions(+), 353 deletions(-) create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/tool/ClasspathUtils.java create mode 100644 buildSrc/src/main/resources/buildSrc.marker diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index e85f9a56086..376ad4d4e67 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -27,7 +27,7 @@ archivesBaseName = 'elasticsearch-benchmarks' test.enabled = false dependencies { - compile("org.elasticsearch:elasticsearch:${version}") { + compile(project(":server")) { // JMH ships with the conflicting version 4.6. This prevents us from using jopt-simple in benchmarks (which should be ok) but allows // us to invoke the JMH uberjar as usual. exclude group: 'net.sf.jopt-simple', module: 'jopt-simple' diff --git a/build.gradle b/build.gradle index 2a79de7e4fc..f3d683671e4 100644 --- a/build.gradle +++ b/build.gradle @@ -209,69 +209,7 @@ allprojects { javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet') } - /* Sets up the dependencies that we build as part of this project but - register as though they were external to resolve internally. We register - them as external dependencies so the build plugin that we use can be used - to build elasticsearch plugins outside of the elasticsearch source tree. 
*/ - ext.projectSubstitutions = [ - "org.elasticsearch.gradle:build-tools:${version}": ':build-tools', - "org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec', - "org.elasticsearch:elasticsearch:${version}": ':server', - "org.elasticsearch:elasticsearch-cli:${version}": ':libs:elasticsearch-cli', - "org.elasticsearch:elasticsearch-core:${version}": ':libs:core', - "org.elasticsearch:elasticsearch-nio:${version}": ':libs:nio', - "org.elasticsearch:elasticsearch-x-content:${version}": ':libs:x-content', - "org.elasticsearch:elasticsearch-geo:${version}": ':libs:elasticsearch-geo', - "org.elasticsearch:elasticsearch-secure-sm:${version}": ':libs:secure-sm', - "org.elasticsearch:elasticsearch-ssl-config:${version}": ':libs:elasticsearch-ssl-config', - "org.elasticsearch.client:elasticsearch-rest-client:${version}": ':client:rest', - "org.elasticsearch.client:elasticsearch-rest-client-sniffer:${version}": ':client:sniffer', - "org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}": ':client:rest-high-level', - "org.elasticsearch.client:test:${version}": ':client:test', - "org.elasticsearch.client:transport:${version}": ':client:transport', - "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${version}": ':modules:lang-painless:spi', - "org.elasticsearch.test:framework:${version}": ':test:framework', - "org.elasticsearch.test:logger-usage:${version}": ':test:logger-usage', - "org.elasticsearch.xpack.test:feature-aware:${version}": ':x-pack:test:feature-aware', - // for transport client - "org.elasticsearch.plugin:transport-netty4-client:${version}": ':modules:transport-netty4', - "org.elasticsearch.plugin:reindex-client:${version}": ':modules:reindex', - "org.elasticsearch.plugin:lang-mustache-client:${version}": ':modules:lang-mustache', - "org.elasticsearch.plugin:parent-join-client:${version}": ':modules:parent-join', - "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}": ':modules:aggs-matrix-stats', - "org.elasticsearch.plugin:percolator-client:${version}": ':modules:percolator', - "org.elasticsearch.plugin:rank-eval-client:${version}": ':modules:rank-eval', - // for security example plugins - "org.elasticsearch.plugin:x-pack-core:${version}": ':x-pack:plugin:core', - "org.elasticsearch.client:x-pack-transport:${version}": ':x-pack:transport-client' - ] - - /* - * Gradle only resolve project substitutions during dependency resolution but - * we sometimes want to do the resolution at other times. This creates a - * convenient method we can call to do it. - */ - ext.dependencyToProject = { Dependency dep -> - if (dep instanceof ProjectDependency) { - return dep.dependencyProject - } else { - String substitution = projectSubstitutions.get("${dep.group}:${dep.name}:${dep.version}") - if (substitution != null) { - return findProject(substitution) - } - return null - } - } - project.afterEvaluate { - configurations.matching { it.canBeResolved }.all { - resolutionStrategy.dependencySubstitution { DependencySubstitutions subs -> - projectSubstitutions.each { k,v -> - subs.substitute(subs.module(k)).with(subs.project(v)) - } - } - } - // Handle javadoc dependencies across projects. Order matters: the linksOffline for // org.elasticsearch:elasticsearch must be the last one or all the links for the // other packages (e.g org.elasticsearch.client) will point to server rather than @@ -280,10 +218,10 @@ allprojects { String artifactsHost = VersionProperties.elasticsearch.endsWith("-SNAPSHOT") ? 
"https://snapshots.elastic.co" : "https://artifacts.elastic.co" Closure sortClosure = { a, b -> b.group <=> a.group } Closure depJavadocClosure = { shadowed, dep -> - if (dep.group == null || false == dep.group.startsWith('org.elasticsearch')) { + if ((dep instanceof ProjectDependency) == false) { return } - Project upstreamProject = project.ext.dependencyToProject(dep) + Project upstreamProject = dep.dependencyProject if (upstreamProject == null) { return } @@ -339,8 +277,8 @@ gradle.projectsEvaluated { integTest.mustRunAfter test } configurations.matching { it.canBeResolved }.all { Configuration configuration -> - dependencies.all { Dependency dep -> - Project upstreamProject = dependencyToProject(dep) + dependencies.matching { it instanceof ProjectDependency }.all { ProjectDependency dep -> + Project upstreamProject = dep.dependencyProject if (upstreamProject != null) { if (project.path == upstreamProject.path) { // TODO: distribution integ tests depend on themselves (!), fix that diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index d3a16f55277..7a8b901f1c5 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -148,6 +148,11 @@ if (project != rootProject) { distribution project(':distribution:archives:linux-tar') distribution project(':distribution:archives:oss-linux-tar') } + + // for external projects we want to remove the marker file indicating we are running the Elasticsearch project + processResources { + exclude 'buildSrc.marker' + } String localDownloads = "${rootProject.buildDir}/local-downloads" task setupLocalDownloads(type:Copy) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index e04d0966c41..692181710f4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -27,6 +27,7 @@ import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.tool.ClasspathUtils import org.gradle.api.InvalidUserDataException import org.gradle.api.Plugin import org.gradle.api.Project @@ -154,8 +155,13 @@ class PluginBuildPlugin implements Plugin { private static void configureDependencies(Project project) { project.dependencies { - compileOnly "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}" - testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}" + if (ClasspathUtils.isElasticsearchProject()) { + compileOnly project.project(':server') + testCompile project.project(':test:framework') + } else { + compileOnly "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}" + testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}" + } // we "upgrade" these optional deps to provided for plugins, since they will run // with a full elasticsearch server that includes optional deps compileOnly "org.locationtech.spatial4j:spatial4j:${project.versions.spatial4j}" diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index f656f177ce6..a5d4f3fcd94 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ 
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -23,11 +23,13 @@ import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.tool.ClasspathUtils import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.Task import org.gradle.api.plugins.JavaBasePlugin import org.gradle.api.plugins.quality.Checkstyle + /** * Validation tasks which should be run before committing. These run before tests. */ @@ -40,18 +42,18 @@ class PrecommitTasks { public static Task create(Project project, boolean includeDependencyLicenses) { project.configurations.create("forbiddenApisCliJar") project.dependencies { - forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.6') + forbiddenApisCliJar('de.thetaphi:forbiddenapis:2.6') } List precommitTasks = [ - configureCheckstyle(project), - configureForbiddenApisCli(project), - project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), - project.tasks.create('licenseHeaders', LicenseHeadersTask.class), - project.tasks.create('filepermissions', FilePermissionsTask.class), - configureJarHell(project), - configureThirdPartyAudit(project), - configureTestingConventions(project) + configureCheckstyle(project), + configureForbiddenApisCli(project), + project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), + project.tasks.create('licenseHeaders', LicenseHeadersTask.class), + project.tasks.create('filepermissions', FilePermissionsTask.class), + configureJarHell(project), + configureThirdPartyAudit(project), + configureTestingConventions(project) ] // tasks with just tests don't need dependency licenses, so this flag makes adding @@ -85,10 +87,10 @@ class PrecommitTasks { } return project.tasks.create([ - name: 'precommit', - group: JavaBasePlugin.VERIFICATION_GROUP, - description: 'Runs all non-test checks.', - dependsOn: precommitTasks + name : 'precommit', + group : JavaBasePlugin.VERIFICATION_GROUP, + description: 'Runs all non-test checks.', + dependsOn : precommitTasks ]) } @@ -168,7 +170,7 @@ class PrecommitTasks { ) } } - Task forbiddenApis = project.tasks.getByName("forbiddenApis") + Task forbiddenApis = project.tasks.getByName("forbiddenApis") forbiddenApis.group = "" return forbiddenApis } @@ -211,7 +213,7 @@ class PrecommitTasks { project.checkstyle { config = project.resources.text.fromFile(checkstyleConf, 'UTF-8') configProperties = [ - suppressions: checkstyleSuppressions + suppressions: checkstyleSuppressions ] toolVersion = CHECKSTYLE_VERSION } @@ -229,9 +231,11 @@ class PrecommitTasks { } private static Task configureLoggerUsage(Project project) { + Object dependency = ClasspathUtils.isElasticsearchProject() ? 
project.project(':test:logger-usage') : + "org.elasticsearch.test:logger-usage:${VersionProperties.elasticsearch}" + project.configurations.create('loggerUsagePlugin') - project.dependencies.add('loggerUsagePlugin', - "org.elasticsearch.test:logger-usage:${VersionProperties.elasticsearch}") + project.dependencies.add('loggerUsagePlugin', dependency) return project.tasks.create('loggerUsageCheck', LoggerUsageTask.class) { classpath = project.configurations.loggerUsagePlugin } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 0ded69756eb..2fe80c2fc47 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -249,7 +249,7 @@ class RestIntegTestTask extends DefaultTask { restSpec } project.dependencies { - restSpec "org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch}" + restSpec project.project(':rest-api-spec') } Task copyRestSpec = project.tasks.findByName('copyRestSpec') if (copyRestSpec != null) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy index c9a26eb74b5..f3ebfecc322 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy @@ -79,7 +79,7 @@ class StandaloneRestTestPlugin implements Plugin { // create a compileOnly configuration as others might expect it project.configurations.create("compileOnly") - project.dependencies.add('testCompile', "org.elasticsearch.test:framework:${VersionProperties.elasticsearch}") + project.dependencies.add('testCompile', project.project(':test:framework')) EclipseModel eclipse = project.extensions.getByType(EclipseModel) eclipse.classpath.sourceSets = [testSourceSet] diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/ClasspathUtils.java b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/ClasspathUtils.java new file mode 100644 index 00000000000..40ec6bd7183 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/ClasspathUtils.java @@ -0,0 +1,23 @@ +package org.elasticsearch.gradle.tool; + +public class ClasspathUtils { + private static boolean isElasticsearchProject; + + static { + // look for buildSrc marker file, if it exists then we are running in the context of the elastic/elasticsearch build + isElasticsearchProject = ClasspathUtils.class.getResource("/buildSrc.marker") != null; + } + + private ClasspathUtils() { + } + + /** + * Determine if we are running in the context of the `elastic/elasticsearch` project. This method will return {@code false} when + * the build-tools project is pulled in as an external dependency. 
+ * + * @return if we are currently running in the `elastic/elasticsearch` project + */ + public static boolean isElasticsearchProject() { + return isElasticsearchProject; + } +} diff --git a/buildSrc/src/main/resources/buildSrc.marker b/buildSrc/src/main/resources/buildSrc.marker new file mode 100644 index 00000000000..e69de29bb2d diff --git a/client/benchmark/build.gradle b/client/benchmark/build.gradle index a53f1020340..eb2c634f972 100644 --- a/client/benchmark/build.gradle +++ b/client/benchmark/build.gradle @@ -34,12 +34,12 @@ test.enabled = false dependencies { compile 'org.apache.commons:commons-math3:3.2' - compile("org.elasticsearch.client:elasticsearch-rest-client:${version}") + compile project(":client:rest") // bottleneck should be the client, not Elasticsearch compile project(path: ':client:client-benchmark-noop-api-plugin') // for transport client - compile("org.elasticsearch:elasticsearch:${version}") - compile("org.elasticsearch.client:transport:${version}") + compile project(":server") + compile project(":client:transport") compile project(path: ':modules:transport-netty4', configuration: 'runtime') compile project(path: ':modules:reindex', configuration: 'runtime') compile project(path: ':modules:lang-mustache', configuration: 'runtime') diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index a5035a70bce..77e656b4826 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -50,24 +50,24 @@ dependencies { * Everything in the "shadow" configuration is *not* copied into the * shadowJar. */ - compile "org.elasticsearch:elasticsearch:${version}" - compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" - compile "org.elasticsearch.plugin:parent-join-client:${version}" - compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" - compile "org.elasticsearch.plugin:rank-eval-client:${version}" - compile "org.elasticsearch.plugin:lang-mustache-client:${version}" + compile project(':server') + compile project(':client:rest') + compile project(':modules:parent-join') + compile project(':modules:aggs-matrix-stats') + compile project(':modules:rank-eval') + compile project(':modules:lang-mustache') - testCompile "org.elasticsearch.client:test:${version}" - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(':client:test') + testCompile project(':test:framework') testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}" //this is needed to make RestHighLevelClientTests#testApiNamingConventions work from IDEs - testCompile "org.elasticsearch:rest-api-spec:${version}" - // Needed for serialization tests: + testCompile project(":rest-api-spec") + // Needed for serialization tests: // (In order to serialize a server side class to a client side class or the other way around) - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(':x-pack:plugin:core') - restSpec "org.elasticsearch:rest-api-spec:${version}" + restSpec project(':rest-api-spec') } //we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions) diff --git a/client/rest/build.gradle b/client/rest/build.gradle index ee031745711..352b15699d0 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -44,7 +44,7 @@ dependencies { compile "commons-codec:commons-codec:${versions.commonscodec}" compile 
"commons-logging:commons-logging:${versions.commonslogging}" - testCompile "org.elasticsearch.client:test:${version}" + testCompile project(":client:test") testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" @@ -68,7 +68,7 @@ forbiddenApisTest { } // JarHell is part of es server, which we don't want to pull in -// TODO: Not anymore. Now in :libs:core +// TODO: Not anymore. Now in :libs:elasticsearch-core jarHell.enabled=false testingConventions { diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index 2f9eeca6020..8b4d21537a1 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -35,14 +35,14 @@ publishing { } dependencies { - compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" + compile project(":client:rest") compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "commons-codec:commons-codec:${versions.commonscodec}" compile "commons-logging:commons-logging:${versions.commonslogging}" compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" - testCompile "org.elasticsearch.client:test:${version}" + testCompile project(":client:test") testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}" testCompile "org.elasticsearch:securemock:${versions.securemock}" @@ -68,7 +68,7 @@ dependencyLicenses { } // JarHell is part of es server, which we don't want to pull in -// TODO: Not anymore. Now in :libs:core +// TODO: Not anymore. Now in :libs:elasticsearch-core jarHell.enabled=false testingConventions { diff --git a/client/test/build.gradle b/client/test/build.gradle index 184606e3607..169172736ec 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -44,7 +44,7 @@ forbiddenApisTest { } // JarHell is part of es server, which we don't want to pull in -// TODO: Not anymore. Now in :libs:core +// TODO: Not anymore. Now in :libs:elasticsearch-core jarHell.enabled=false // TODO: should we have licenses for our test deps? 
diff --git a/client/transport/build.gradle b/client/transport/build.gradle index c1e4503445b..36bd6740182 100644 --- a/client/transport/build.gradle +++ b/client/transport/build.gradle @@ -23,13 +23,13 @@ apply plugin: 'nebula.maven-scm' group = 'org.elasticsearch.client' dependencies { - compile "org.elasticsearch:elasticsearch:${version}" - compile "org.elasticsearch.plugin:transport-netty4-client:${version}" - compile "org.elasticsearch.plugin:reindex-client:${version}" - compile "org.elasticsearch.plugin:lang-mustache-client:${version}" - compile "org.elasticsearch.plugin:percolator-client:${version}" - compile "org.elasticsearch.plugin:parent-join-client:${version}" - compile "org.elasticsearch.plugin:rank-eval-client:${version}" + compile project(":server") + compile project(":modules:transport-netty4") + compile project(":modules:reindex") + compile project(":modules:lang-mustache") + compile project(":modules:percolator") + compile project(":modules:parent-join") + compile project(":modules:rank-eval") testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" diff --git a/distribution/build.gradle b/distribution/build.gradle index a92b157eaba..940cbd0369a 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -241,7 +241,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { // delay by using closures, since they have not yet been configured, so no jar task exists yet from { project(':server').jar } from { project(':server').configurations.runtime } - from { project(':libs:plugin-classloader').jar } + from { project(':libs:elasticsearch-plugin-classloader').jar } from { project(':distribution:tools:java-version-checker').jar } from { project(':distribution:tools:launchers').jar } into('tools/plugin-cli') { diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index 48bc899cd29..3db958c6ec4 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -22,11 +22,11 @@ apply plugin: 'elasticsearch.build' archivesBaseName = 'elasticsearch-plugin-cli' dependencies { - compileOnly "org.elasticsearch:elasticsearch:${version}" - compileOnly "org.elasticsearch:elasticsearch-cli:${version}" + compileOnly project(":server") + compileOnly project(":libs:elasticsearch-cli") compile "org.bouncycastle:bcpg-jdk15on:${versions.bouncycastle}" compile "org.bouncycastle:bcprov-jdk15on:${versions.bouncycastle}" - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(":test:framework") testCompile 'com.google.jimfs:jimfs:1.1' testCompile 'com.google.guava:guava:18.0' } diff --git a/libs/build.gradle b/libs/build.gradle index b0924aa1f54..03b5d2c611e 100644 --- a/libs/build.gradle +++ b/libs/build.gradle @@ -26,19 +26,19 @@ subprojects { /* * Subprojects may depend on the "core" lib but may not depend on any - * other libs. This keeps are dependencies simpler. + * other libs. This keeps our dependencies simpler. 
*/ project.afterEvaluate { configurations.all { Configuration conf -> - dependencies.all { Dependency dep -> - Project depProject = dependencyToProject(dep) + dependencies.matching { it instanceof ProjectDependency }.all { ProjectDependency dep -> + Project depProject = dep.dependencyProject if (depProject != null - && false == depProject.path.equals(':libs:core') + && false == depProject.path.equals(':libs:elasticsearch-core') && false == isEclipse && depProject.path.startsWith(':libs')) { throw new InvalidUserDataException("projects in :libs " + "may not depend on other projects libs except " - + ":libs:core but " + + ":libs:elasticsearch-core but " + "${project.path} depends on ${depProject.path}") } } diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle index b1f3b338255..a97c62096a5 100644 --- a/libs/cli/build.gradle +++ b/libs/cli/build.gradle @@ -23,7 +23,7 @@ apply plugin: 'nebula.maven-scm' dependencies { compile 'net.sf.jopt-simple:jopt-simple:5.0.2' - compile "org.elasticsearch:elasticsearch-core:${version}" + compile project(':libs:elasticsearch-core') } test.enabled = false diff --git a/libs/core/build.gradle b/libs/core/build.gradle index 36c40f747d6..046c3d070e8 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -82,14 +82,14 @@ dependencies { } if (isEclipse == false || project.path == ":libs:core-tests") { - testCompile("org.elasticsearch.test:framework:${version}") { + testCompile(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'elasticsearch-core' } } } forbiddenApisMain { - // :libs:core does not depend on server + // :libs:elasticsearch-core does not depend on server // TODO: Need to decide how we want to handle for forbidden signatures with the changes to server replaceSignatureFiles 'jdk-signatures' } @@ -97,7 +97,7 @@ forbiddenApisMain { if (isEclipse) { // in eclipse the project is under a fake root, we need to change around the source sets sourceSets { - if (project.path == ":libs:core") { + if (project.path == ":libs:elasticsearch-core") { main.java.srcDirs = ['java'] main.resources.srcDirs = ['resources'] } else { diff --git a/libs/dissect/build.gradle b/libs/dissect/build.gradle index 853c78646c2..7e71f86f64f 100644 --- a/libs/dissect/build.gradle +++ b/libs/dissect/build.gradle @@ -17,17 +17,15 @@ * under the License. 
*/ -archivesBaseName = 'elasticsearch-dissect' - dependencies { - if (isEclipse == false || project.path == ":libs:dissect-tests") { - testCompile("org.elasticsearch.test:framework:${version}") { - exclude group: 'org.elasticsearch', module: 'dissect' + if (isEclipse == false || project.path == ":libs:elasticsearch-dissect-tests") { + testCompile(project(":test:framework")) { + exclude group: 'org.elasticsearch', module: 'elasticsearch-dissect' } } testCompile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" - testCompile("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") - testCompile("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") + testCompile "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" + testCompile "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" } forbiddenApisMain { @@ -37,7 +35,7 @@ forbiddenApisMain { if (isEclipse) { // in eclipse the project is under a fake root, we need to change around the source sets sourceSets { - if (project.path == ":libs:dissect") { + if (project.path == ":libs:elasticsearch-dissect") { main.java.srcDirs = ['java'] main.resources.srcDirs = ['resources'] } else { diff --git a/libs/geo/build.gradle b/libs/geo/build.gradle index ab3419b93b9..e2e5a11d535 100644 --- a/libs/geo/build.gradle +++ b/libs/geo/build.gradle @@ -22,8 +22,8 @@ apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' dependencies { - if (isEclipse == false || project.path == ":libs:geo-tests") { - testCompile("org.elasticsearch.test:framework:${version}") { + if (isEclipse == false || project.path == ":libs:elasticsearch-geo-tests") { + testCompile(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'elasticsearch-geo' } } @@ -38,7 +38,7 @@ forbiddenApisMain { if (isEclipse) { // in eclipse the project is under a fake root, we need to change around the source sets sourceSets { - if (project.path == ":libs:geo") { + if (project.path == ":libs:elasticsearch-geo") { main.java.srcDirs = ['java'] main.resources.srcDirs = ['resources'] } else { diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle index 9ca02df35aa..ca363480519 100644 --- a/libs/grok/build.gradle +++ b/libs/grok/build.gradle @@ -17,16 +17,14 @@ * under the License. 
*/ -archivesBaseName = 'elasticsearch-grok' - dependencies { compile 'org.jruby.joni:joni:2.1.6' // joni dependencies: compile 'org.jruby.jcodings:jcodings:1.0.12' - if (isEclipse == false || project.path == ":libs:grok-tests") { - testCompile("org.elasticsearch.test:framework:${version}") { - exclude group: 'org.elasticsearch', module: 'grok' + if (isEclipse == false || project.path == ":libs:elasticsearch-grok-tests") { + testCompile(project(":test:framework")) { + exclude group: 'org.elasticsearch', module: 'elasticsearch-grok' } } } @@ -38,7 +36,7 @@ forbiddenApisMain { if (isEclipse) { // in eclipse the project is under a fake root, we need to change around the source sets sourceSets { - if (project.path == ":libs:grok") { + if (project.path == ":libs:elasticsearch-grok") { main.java.srcDirs = ['java'] main.resources.srcDirs = ['resources'] } else { diff --git a/libs/nio/build.gradle b/libs/nio/build.gradle index 66436bb040e..d6d0eaea0ab 100644 --- a/libs/nio/build.gradle +++ b/libs/nio/build.gradle @@ -19,25 +19,15 @@ apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' -archivesBaseName = 'elasticsearch-nio' - -publishing { - publications { - nebula { - artifactId = archivesBaseName - } - } -} - dependencies { - compile "org.elasticsearch:elasticsearch-core:${version}" + compile project(':libs:elasticsearch-core') testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" - if (isEclipse == false || project.path == ":libs:nio-tests") { - testCompile("org.elasticsearch.test:framework:${version}") { + if (isEclipse == false || project.path == ":libs:elasticsearch-nio-tests") { + testCompile(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'elasticsearch-nio' } } @@ -46,7 +36,7 @@ dependencies { if (isEclipse) { // in eclipse the project is under a fake root, we need to change around the source sets sourceSets { - if (project.path == ":libs:nio") { + if (project.path == ":libs:elasticsearch-nio") { main.java.srcDirs = ['java'] main.resources.srcDirs = ['resources'] } else { diff --git a/libs/secure-sm/build.gradle b/libs/secure-sm/build.gradle index bbd44afc70a..3e79d9ee2e8 100644 --- a/libs/secure-sm/build.gradle +++ b/libs/secure-sm/build.gradle @@ -19,16 +19,6 @@ apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' -archivesBaseName = 'elasticsearch-secure-sm' - -publishing { - publications { - nebula { - artifactId = archivesBaseName - } - } -} - dependencies { // do not add non-test compile dependencies to secure-sm without a good reason to do so @@ -36,9 +26,9 @@ dependencies { testCompile "junit:junit:${versions.junit}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" - if (isEclipse == false || project.path == ":libs:secure-sm-tests") { - testCompile("org.elasticsearch.test:framework:${version}") { - exclude group: 'org.elasticsearch', module: 'secure-sm' + if (isEclipse == false || project.path == ":libs:elasticsearch-secure-sm-tests") { + testCompile(project(":test:framework")) { + exclude group: 'org.elasticsearch', module: 'elasticsearch-secure-sm' } } } @@ -50,7 +40,7 @@ forbiddenApisMain { if (isEclipse) { // in Eclipse the project is under a fake root so we need to change around the source sets sourceSets { - if (project.path == ":libs:secure-sm") { + if (project.path == ":libs:elasticsearch-secure-sm") { main.java.srcDirs = ['java'] 
main.resources.srcDirs = ['resources'] } else { diff --git a/libs/ssl-config/build.gradle b/libs/ssl-config/build.gradle index 860cdcd9e6f..71ebd642a43 100644 --- a/libs/ssl-config/build.gradle +++ b/libs/ssl-config/build.gradle @@ -19,10 +19,10 @@ apply plugin: "nebula.maven-scm" dependencies { - compile "org.elasticsearch:elasticsearch-core:${version}" + compile project(':libs:elasticsearch-core') - if (isEclipse == false || project.path == ":libs:ssl-config-tests") { - testCompile("org.elasticsearch.test:framework:${version}") { + if (isEclipse == false || project.path == ":libs:elasticsearch-ssl-config-tests") { + testCompile(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'elasticsearch-ssl-config' } } @@ -35,7 +35,7 @@ dependencies { if (isEclipse) { // in eclipse the project is under a fake root, we need to change around the source sets sourceSets { - if (project.path == ":libs:ssl-config") { + if (project.path == ":libs:elasticsearch-ssl-config") { main.java.srcDirs = ['java'] main.resources.srcDirs = ['resources'] } else { diff --git a/libs/x-content/build.gradle b/libs/x-content/build.gradle index 0e99d80da1e..e54427c0583 100644 --- a/libs/x-content/build.gradle +++ b/libs/x-content/build.gradle @@ -21,18 +21,8 @@ apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' -archivesBaseName = 'elasticsearch-x-content' - -publishing { - publications { - nebula { - artifactId = archivesBaseName - } - } -} - dependencies { - compile "org.elasticsearch:elasticsearch-core:${version}" + compile project(':libs:elasticsearch-core') compile "org.yaml:snakeyaml:${versions.snakeyaml}" compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" @@ -44,8 +34,8 @@ dependencies { testCompile "junit:junit:${versions.junit}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" - if (isEclipse == false || project.path == ":libs:x-content-tests") { - testCompile("org.elasticsearch.test:framework:${version}") { + if (isEclipse == false || project.path == ":libs:elasticsearch-x-content-tests") { + testCompile(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'elasticsearch-x-content' } } @@ -61,7 +51,7 @@ forbiddenApisMain { if (isEclipse) { // in eclipse the project is under a fake root, we need to change around the source sets sourceSets { - if (project.path == ":libs:x-content") { + if (project.path == ":libs:elasticsearch-x-content") { main.java.srcDirs = ['java'] main.resources.srcDirs = ['resources'] } else { diff --git a/modules/ingest-common/build.gradle b/modules/ingest-common/build.gradle index a94c375afc3..00c444e50e8 100644 --- a/modules/ingest-common/build.gradle +++ b/modules/ingest-common/build.gradle @@ -25,6 +25,6 @@ esplugin { dependencies { compileOnly project(':modules:lang-painless') - compile project(':libs:grok') - compile project(':libs:dissect') + compile project(':libs:elasticsearch-grok') + compile project(':libs:elasticsearch-dissect') } \ No newline at end of file diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index 1f6b722ec30..6cce3850232 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -69,7 +69,7 @@ sourceSets { } dependencies { - docCompile "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}" + docCompile project(':server') docCompile project(':modules:lang-painless') } diff --git a/modules/lang-painless/spi/build.gradle 
b/modules/lang-painless/spi/build.gradle index 7e43a242a23..3f25f247a2b 100644 --- a/modules/lang-painless/spi/build.gradle +++ b/modules/lang-painless/spi/build.gradle @@ -33,7 +33,7 @@ publishing { } dependencies { - compile "org.elasticsearch:elasticsearch:${version}" + compile project(":server") } // no tests...yet? diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 260c8dcc1df..17cd8b26263 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -55,8 +55,8 @@ test { } dependencies { - compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" - compile "org.elasticsearch:elasticsearch-ssl-config:${version}" + compile project(":client:rest") + compile project(":libs:elasticsearch-ssl-config") // for http - testing reindex from remote testCompile project(path: ':modules:transport-netty4', configuration: 'runtime') // for parent/child testing diff --git a/plugins/examples/build.gradle b/plugins/examples/build.gradle index 2b9f3c6433d..cd2739f035f 100644 --- a/plugins/examples/build.gradle +++ b/plugins/examples/build.gradle @@ -7,3 +7,22 @@ gradle.projectsEvaluated { } } } + +configure(project('painless-whitelist')) { + configurations.all { + resolutionStrategy.dependencySubstitution { + substitute module('org.elasticsearch.plugin:elasticsearch-scripting-painless-spi') with project(':modules:lang-painless:spi') + substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage') + } + } +} + +configure(project('security-authorization-engine')) { + configurations.all { + resolutionStrategy.dependencySubstitution { + substitute module('org.elasticsearch.plugin:x-pack-core') with project(':x-pack:plugin:core') + substitute module('org.elasticsearch.client:x-pack-transport') with project(':x-pack:transport-client') + substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage') + } + } +} \ No newline at end of file diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index a982758482c..f3b96a3ebe3 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -25,7 +25,7 @@ esplugin { } dependencies { - compile "org.elasticsearch:elasticsearch-nio:${version}" + compile project(':libs:elasticsearch-nio') // network stack compile "io.netty:netty-buffer:${versions.netty}" diff --git a/qa/ccs-unavailable-clusters/build.gradle b/qa/ccs-unavailable-clusters/build.gradle index c1f2bc96271..ea80ee983b8 100644 --- a/qa/ccs-unavailable-clusters/build.gradle +++ b/qa/ccs-unavailable-clusters/build.gradle @@ -21,5 +21,5 @@ apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.test-with-dependencies' dependencies { - testCompile "org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}" + testCompile project(":client:rest-high-level") } diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index bca12be6754..7f923d03f71 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -22,7 +22,7 @@ import org.elasticsearch.gradle.test.RestIntegTestTask apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile "org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}" + testCompile project(":client:rest-high-level") } task remoteClusterTest(type: RestIntegTestTask) { diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 7c342436dd3..f5cfcdda03c 100644 --- a/qa/vagrant/build.gradle +++ 
b/qa/vagrant/build.gradle @@ -35,7 +35,7 @@ dependencies { compile "commons-codec:commons-codec:${versions.commonscodec}" compile "commons-logging:commons-logging:${versions.commonslogging}" - compile project(':libs:core') + compile project(':libs:elasticsearch-core') // pulls in the jar built by this project and its dependencies packagingTest project(path: project.path, configuration: 'runtime') diff --git a/qa/wildfly/build.gradle b/qa/wildfly/build.gradle index dcbf5253bb0..8e5e8ed635d 100644 --- a/qa/wildfly/build.gradle +++ b/qa/wildfly/build.gradle @@ -72,7 +72,7 @@ dependencies { compile "org.apache.logging.log4j:log4j-core:${versions.log4j}" compile project(path: ':client:transport', configuration: 'runtime') wildfly "org.jboss:wildfly:${wildflyVersion}@zip" - testCompile "org.elasticsearch.test:framework:${VersionProperties.elasticsearch}" + testCompile project(':test:framework') } task unzipWildfly(type: Sync) { diff --git a/server/build.gradle b/server/build.gradle index bf9d85e8766..1e75edf251a 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -74,13 +74,13 @@ if (!isEclipse && !isIdea) { dependencies { - compile "org.elasticsearch:elasticsearch-core:${version}" - compile "org.elasticsearch:elasticsearch-secure-sm:${version}" - compile "org.elasticsearch:elasticsearch-x-content:${version}" - compile "org.elasticsearch:elasticsearch-geo:${version}" + compile project(':libs:elasticsearch-core') + compile project(':libs:elasticsearch-secure-sm') + compile project(':libs:elasticsearch-x-content') + compile project(":libs:elasticsearch-geo") - compileOnly project(':libs:plugin-classloader') - testRuntime project(':libs:plugin-classloader') + compileOnly project(':libs:elasticsearch-plugin-classloader') + testRuntime project(':libs:elasticsearch-plugin-classloader') // lucene compile "org.apache.lucene:lucene-core:${versions.lucene}" @@ -100,7 +100,7 @@ dependencies { compile "org.apache.lucene:lucene-suggest:${versions.lucene}" // utilities - compile "org.elasticsearch:elasticsearch-cli:${version}" + compile project(":libs:elasticsearch-cli") compile 'com.carrotsearch:hppc:0.7.1' // time handling, remove with java 8 time @@ -127,9 +127,9 @@ dependencies { } if (isEclipse == false || project.path == ":server-tests") { - testCompile("org.elasticsearch.test:framework:${version}") { + testCompile(project(":test:framework")) { // tests use the locally compiled version of server - exclude group: 'org.elasticsearch', module: 'elasticsearch' + exclude group: 'org.elasticsearch', module: 'server' } } testCompile 'com.google.jimfs:jimfs:1.1' diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy index 4df99ef6f88..fbfa0f39b16 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -47,7 +47,7 @@ grant codeBase "${codebase.lucene-misc}" { permission java.nio.file.LinkPermission "hard"; }; -grant codeBase "${codebase.plugin-classloader}" { +grant codeBase "${codebase.elasticsearch-plugin-classloader}" { // needed to create the classloader which allows plugins to extend other plugins permission java.lang.RuntimePermission "createClassLoader"; }; diff --git a/settings.gradle b/settings.gradle index 7532230e7b8..c20ad55a462 100644 --- a/settings.gradle +++ b/settings.gradle @@ -117,43 +117,47 @@ include projects.toArray(new String[0]) project(':build-tools').projectDir = new 
File(rootProject.projectDir, 'buildSrc') +project(":libs").children.each { libsProject -> + libsProject.name = "elasticsearch-${libsProject.name}" +} + if (isEclipse) { project(":server").projectDir = new File(rootProject.projectDir, 'server/src/main') project(":server").buildFileName = 'eclipse-build.gradle' project(":server-tests").projectDir = new File(rootProject.projectDir, 'server/src/test') project(":server-tests").buildFileName = 'eclipse-build.gradle' - project(":libs:core").projectDir = new File(rootProject.projectDir, 'libs/core/src/main') - project(":libs:core").buildFileName = 'eclipse-build.gradle' - project(":libs:core-tests").projectDir = new File(rootProject.projectDir, 'libs/core/src/test') - project(":libs:core-tests").buildFileName = 'eclipse-build.gradle' - project(":libs:dissect").projectDir = new File(rootProject.projectDir, 'libs/dissect/src/main') - project(":libs:dissect").buildFileName = 'eclipse-build.gradle' - project(":libs:dissect-tests").projectDir = new File(rootProject.projectDir, 'libs/dissect/src/test') - project(":libs:dissect-tests").buildFileName = 'eclipse-build.gradle' - project(":libs:nio").projectDir = new File(rootProject.projectDir, 'libs/nio/src/main') - project(":libs:nio").buildFileName = 'eclipse-build.gradle' - project(":libs:nio-tests").projectDir = new File(rootProject.projectDir, 'libs/nio/src/test') - project(":libs:nio-tests").buildFileName = 'eclipse-build.gradle' - project(":libs:x-content").projectDir = new File(rootProject.projectDir, 'libs/x-content/src/main') - project(":libs:x-content").buildFileName = 'eclipse-build.gradle' - project(":libs:x-content-tests").projectDir = new File(rootProject.projectDir, 'libs/x-content/src/test') - project(":libs:x-content-tests").buildFileName = 'eclipse-build.gradle' - project(":libs:secure-sm").projectDir = new File(rootProject.projectDir, 'libs/secure-sm/src/main') - project(":libs:secure-sm").buildFileName = 'eclipse-build.gradle' - project(":libs:secure-sm-tests").projectDir = new File(rootProject.projectDir, 'libs/secure-sm/src/test') - project(":libs:secure-sm-tests").buildFileName = 'eclipse-build.gradle' - project(":libs:grok").projectDir = new File(rootProject.projectDir, 'libs/grok/src/main') - project(":libs:grok").buildFileName = 'eclipse-build.gradle' - project(":libs:grok-tests").projectDir = new File(rootProject.projectDir, 'libs/grok/src/test') - project(":libs:grok-tests").buildFileName = 'eclipse-build.gradle' - project(":libs:geo").projectDir = new File(rootProject.projectDir, 'libs/geo/src/main') - project(":libs:geo").buildFileName = 'eclipse-build.gradle' - project(":libs:geo-tests").projectDir = new File(rootProject.projectDir, 'libs/geo/src/test') - project(":libs:geo-tests").buildFileName = 'eclipse-build.gradle' - project(":libs:ssl-config").projectDir = new File(rootProject.projectDir, 'libs/ssl-config/src/main') - project(":libs:ssl-config").buildFileName = 'eclipse-build.gradle' - project(":libs:ssl-config-tests").projectDir = new File(rootProject.projectDir, 'libs/ssl-config/src/test') - project(":libs:ssl-config-tests").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-core").projectDir = new File(rootProject.projectDir, 'libs/core/src/main') + project(":libs:elasticsearch-core").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-core-tests").projectDir = new File(rootProject.projectDir, 'libs/core/src/test') + project(":libs:elasticsearch-core-tests").buildFileName = 'eclipse-build.gradle' + 
project(":libs:elasticsearch-dissect").projectDir = new File(rootProject.projectDir, 'libs/dissect/src/main') + project(":libs:elasticsearch-dissect").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-dissect-tests").projectDir = new File(rootProject.projectDir, 'libs/dissect/src/test') + project(":libs:elasticsearch-dissect-tests").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-nio").projectDir = new File(rootProject.projectDir, 'libs/nio/src/main') + project(":libs:elasticsearch-nio").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-nio-tests").projectDir = new File(rootProject.projectDir, 'libs/nio/src/test') + project(":libs:elasticsearch-nio-tests").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-x-content").projectDir = new File(rootProject.projectDir, 'libs/x-content/src/main') + project(":libs:elasticsearch-x-content").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-x-content-tests").projectDir = new File(rootProject.projectDir, 'libs/x-content/src/test') + project(":libs:elasticsearch-x-content-tests").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-secure-sm").projectDir = new File(rootProject.projectDir, 'libs/secure-sm/src/main') + project(":libs:elasticsearch-secure-sm").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-secure-sm-tests").projectDir = new File(rootProject.projectDir, 'libs/secure-sm/src/test') + project(":libs:elasticsearch-secure-sm-tests").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-grok").projectDir = new File(rootProject.projectDir, 'libs/grok/src/main') + project(":libs:elasticsearch-grok").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-grok-tests").projectDir = new File(rootProject.projectDir, 'libs/grok/src/test') + project(":libs:elasticsearch-grok-tests").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-geo").projectDir = new File(rootProject.projectDir, 'libs/geo/src/main') + project(":libs:elasticsearch-geo").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-geo-tests").projectDir = new File(rootProject.projectDir, 'libs/geo/src/test') + project(":libs:elasticsearch-geo-tests").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-ssl-config").projectDir = new File(rootProject.projectDir, 'libs/ssl-config/src/main') + project(":libs:elasticsearch-ssl-config").buildFileName = 'eclipse-build.gradle' + project(":libs:elasticsearch-ssl-config-tests").projectDir = new File(rootProject.projectDir, 'libs/ssl-config/src/test') + project(":libs:elasticsearch-ssl-config-tests").buildFileName = 'eclipse-build.gradle' } // look for extra plugins for elasticsearch @@ -163,7 +167,3 @@ if (extraProjects.exists()) { addSubProjects('', extraProjectDir) } } - -project(":libs:cli").name = 'elasticsearch-cli' -project(":libs:geo").name = 'elasticsearch-geo' -project(":libs:ssl-config").name = 'elasticsearch-ssl-config' diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 18978bd2d75..9cabdb82bf3 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -18,11 +18,11 @@ */ dependencies { - compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" - compile "org.elasticsearch.client:elasticsearch-rest-client-sniffer:${version}" - compile "org.elasticsearch:elasticsearch-nio:${version}" - compile "org.elasticsearch:elasticsearch:${version}" - compile 
"org.elasticsearch:elasticsearch-cli:${version}" + compile project(":client:rest") + compile project(":client:sniffer") + compile project(':libs:elasticsearch-nio') + compile project(":server") + compile project(":libs:elasticsearch-cli") compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" compile "junit:junit:${versions.junit}" compile "org.hamcrest:hamcrest:${versions.hamcrest}" diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index 8e374d2cf6a..0fa2ee3bfd9 100644 --- a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -20,7 +20,7 @@ dependencies { compile 'org.ow2.asm:asm-debug-all:5.0.4' // use asm-debug-all as asm-all is broken compile "org.apache.logging.log4j:log4j-api:${versions.log4j}" - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(":test:framework") } loggerUsageCheck.enabled = false diff --git a/x-pack/build.gradle b/x-pack/build.gradle index 9c90bbbbfc2..f9b13f07618 100644 --- a/x-pack/build.gradle +++ b/x-pack/build.gradle @@ -26,17 +26,3 @@ subprojects { project.ext.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt') project.ext.noticeFile = xpackRootProject.file('NOTICE.txt') } - -subprojects { - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-ccr:${version}": xpackModule('ccr')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-core:${version}": xpackModule('core')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-deprecation:${version}": xpackModule('deprecation')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-graph:${version}": xpackModule('graph')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-ilm:${version}": xpackModule('ilm')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-logstash:${version}": xpackModule('logstash')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-ml:${version}": xpackModule('ml')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-monitoring:${version}": xpackModule('monitoring')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-security:${version}": xpackModule('security')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-sql:${version}": xpackModule('sql')] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-watcher:${version}": xpackModule('watcher')] -} diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index efe08ff3b29..1450012601d 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -18,7 +18,6 @@ buildRestTests.expectedUnconvertedCandidates = [ ] dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') diff --git a/x-pack/license-tools/build.gradle b/x-pack/license-tools/build.gradle index 4bd17713a2f..2a032dc0cd1 100644 --- a/x-pack/license-tools/build.gradle +++ b/x-pack/license-tools/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.build' dependencies { - compile "org.elasticsearch.plugin:x-pack-core:${version}" - compile "org.elasticsearch:elasticsearch:${version}" - testCompile "org.elasticsearch.test:framework:${version}" + compile project(':x-pack:plugin:core') + 
compile project(':server') + testCompile project(':test:framework') } project.forbiddenPatterns { diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 6ce71982f5b..0aa9767c4ff 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -19,9 +19,7 @@ subprojects { // see the root Gradle file for additional logic regarding this configuration project.configurations.create('featureAwarePlugin') project.dependencies.add('featureAwarePlugin', project.configurations.compileClasspath) - project.dependencies.add( - 'featureAwarePlugin', - "org.elasticsearch.xpack.test:feature-aware:${org.elasticsearch.gradle.VersionProperties.elasticsearch}") + project.dependencies.add('featureAwarePlugin', project(':x-pack:test:feature-aware')) project.dependencies.add('featureAwarePlugin', project.sourceSets.main.output.getClassesDirs()) final Task featureAwareTask = project.tasks.create("featureAwareCheck", LoggedExec) { diff --git a/x-pack/plugin/ccr/build.gradle b/x-pack/plugin/ccr/build.gradle index a808a7197cc..7f07db1d540 100644 --- a/x-pack/plugin/ccr/build.gradle +++ b/x-pack/plugin/ccr/build.gradle @@ -48,7 +48,7 @@ gradle.projectsEvaluated { } dependencies { - compileOnly "org.elasticsearch:elasticsearch:${version}" + compileOnly project(":server") compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 832b0af18fb..06c9e2c109a 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -23,7 +23,7 @@ dependencyLicenses { } dependencies { - compileOnly "org.elasticsearch:elasticsearch:${version}" + compileOnly project(":server") compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" @@ -95,7 +95,7 @@ forbiddenApisMain { if (isEclipse) { // in eclipse the project is under a fake root, we need to change around the source sets sourceSets { - if (project.path == ":libs:core") { + if (project.path == ":libs:elasticsearch-core") { main.java.srcDirs = ['java'] main.resources.srcDirs = ['resources'] } else { diff --git a/x-pack/plugin/data-frame/build.gradle b/x-pack/plugin/data-frame/build.gradle index e065f72e998..03c89994e97 100644 --- a/x-pack/plugin/data-frame/build.gradle +++ b/x-pack/plugin/data-frame/build.gradle @@ -9,7 +9,7 @@ esplugin { } dependencies { - compileOnly "org.elasticsearch:elasticsearch:${version}" + compileOnly project(":server") compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') diff --git a/x-pack/plugin/data-frame/qa/multi-node-tests/build.gradle b/x-pack/plugin/data-frame/qa/multi-node-tests/build.gradle index 3637454e5f5..950d818ee4c 100644 --- a/x-pack/plugin/data-frame/qa/multi-node-tests/build.gradle +++ b/x-pack/plugin/data-frame/qa/multi-node-tests/build.gradle @@ -5,7 +5,7 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('data-frame'), configuration: 'runtime') - testCompile "org.elasticsearch.client:elasticsearch-rest-high-level-client:${versions.elasticsearch}" + testCompile project(':client:rest-high-level') } // location for 
keys and certificates diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle b/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle index 0a79f293061..7571495d96c 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle @@ -5,7 +5,7 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('data-frame'), configuration: 'runtime') - testCompile "org.elasticsearch.client:elasticsearch-rest-high-level-client:${versions.elasticsearch}" + testCompile project(':client:rest-high-level') } integTestCluster { diff --git a/x-pack/plugin/deprecation/build.gradle b/x-pack/plugin/deprecation/build.gradle index 62d2a891929..bbf235131d7 100644 --- a/x-pack/plugin/deprecation/build.gradle +++ b/x-pack/plugin/deprecation/build.gradle @@ -10,7 +10,7 @@ esplugin { archivesBaseName = 'x-pack-deprecation' dependencies { - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(":x-pack:plugin:core") } integTest.enabled = false diff --git a/x-pack/plugin/graph/build.gradle b/x-pack/plugin/graph/build.gradle index e7b0b44fd65..0b96516dd73 100644 --- a/x-pack/plugin/graph/build.gradle +++ b/x-pack/plugin/graph/build.gradle @@ -10,7 +10,6 @@ esplugin { archivesBaseName = 'x-pack-graph' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/graph/qa/with-security/build.gradle b/x-pack/plugin/graph/qa/with-security/build.gradle index f0f819b46d4..a79d72d0c7b 100644 --- a/x-pack/plugin/graph/qa/with-security/build.gradle +++ b/x-pack/plugin/graph/qa/with-security/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(":x-pack:plugin:core") } // bring in graph rest test suite diff --git a/x-pack/plugin/ilm/build.gradle b/x-pack/plugin/ilm/build.gradle index e6962e3c3bf..d29d9053de3 100644 --- a/x-pack/plugin/ilm/build.gradle +++ b/x-pack/plugin/ilm/build.gradle @@ -13,7 +13,6 @@ esplugin { archivesBaseName = 'x-pack-ilm' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/logstash/build.gradle b/x-pack/plugin/logstash/build.gradle index 476d3f17cad..aad286db95a 100644 --- a/x-pack/plugin/logstash/build.gradle +++ b/x-pack/plugin/logstash/build.gradle @@ -10,7 +10,6 @@ esplugin { archivesBaseName = 'x-pack-logstash' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 9bd4d445892..660a09c6b94 100644 --- a/x-pack/plugin/ml/build.gradle +++ 
b/x-pack/plugin/ml/build.gradle @@ -49,15 +49,14 @@ compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try, compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(':modules:lang-painless:spi') compileOnly project(path: xpackModule('core'), configuration: 'default') - compileOnly "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${versions.elasticsearch}" testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // This should not be here testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') // ml deps - compile project(':libs:grok') + compile project(':libs:elasticsearch-grok') compile "com.ibm.icu:icu4j:${versions.icu4j}" compile "net.sf.supercsv:super-csv:${versions.supercsv}" nativeBundle "org.elasticsearch.ml:ml-cpp:${project.version}@zip" diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index b47016c1344..fc27aa97d7a 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(":x-pack:plugin:core") testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/plugin/ml/qa/disabled/build.gradle b/x-pack/plugin/ml/qa/disabled/build.gradle index 2aa5d47acef..ee49189ae1a 100644 --- a/x-pack/plugin/ml/qa/disabled/build.gradle +++ b/x-pack/plugin/ml/qa/disabled/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(":x-pack:plugin:core") testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index bc0b0ca5b7b..8d7f799d5a2 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -2,7 +2,6 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index 22fd7837628..c6776156c94 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -2,7 +2,6 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), 
configuration: 'testArtifacts') testCompile project(path: xpackModule('ml'), configuration: 'runtime') diff --git a/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle b/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle index 9eac3fdd37a..1908fb8e092 100644 --- a/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle +++ b/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle @@ -1,6 +1,6 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(":x-pack:plugin:core") testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/plugin/ml/qa/single-node-tests/build.gradle b/x-pack/plugin/ml/qa/single-node-tests/build.gradle index f856c3d4c5f..a51e0a3141c 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/single-node-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(":x-pack:plugin:core") testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/plugin/monitoring/build.gradle b/x-pack/plugin/monitoring/build.gradle index b2e0c930e0d..6e45c830e2d 100644 --- a/x-pack/plugin/monitoring/build.gradle +++ b/x-pack/plugin/monitoring/build.gradle @@ -10,18 +10,17 @@ esplugin { archivesBaseName = 'x-pack-monitoring' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // monitoring deps - compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" - compile "org.elasticsearch.client:elasticsearch-rest-client-sniffer:${version}" + compile project(':client:rest') + compile project(':client:sniffer') // baz - this goes away after we separate out the actions #27759 - testCompile "org.elasticsearch.plugin:x-pack-watcher:${version}" + testCompile project(xpackModule('watcher')) - testCompile "org.elasticsearch.plugin:x-pack-ilm:${version}" + testCompile project(xpackModule('ilm')) } compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle index d159f3334b9..4650927ad87 100644 --- a/x-pack/plugin/rollup/build.gradle +++ b/x-pack/plugin/rollup/build.gradle @@ -14,9 +14,8 @@ compileTestJava.options.compilerArgs << "-Xlint:-rawtypes" dependencies { - compileOnly "org.elasticsearch:elasticsearch:${version}" - - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(":server") + compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index e7e9c0fa71f..4bd70b83783 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -13,7 +13,6 @@ esplugin { archivesBaseName = 'x-pack-security' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') compileOnly project(path: 
':modules:transport-netty4', configuration: 'runtime') compileOnly project(path: ':plugins:transport-nio', configuration: 'runtime') diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 205815bda8c..af4f0ce7ed1 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -5,13 +5,12 @@ apply plugin: 'elasticsearch.build' archivesBaseName = 'elasticsearch-security-cli' dependencies { - compileOnly "org.elasticsearch:elasticsearch:${version}" - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(":server") compileOnly project(path: xpackModule('core'), configuration: 'default') compile "org.bouncycastle:bcpkix-jdk15on:${versions.bouncycastle}" compile "org.bouncycastle:bcprov-jdk15on:${versions.bouncycastle}" testImplementation 'com.google.jimfs:jimfs:1.1' - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(":test:framework") testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/security/qa/basic-enable-security/build.gradle b/x-pack/plugin/security/qa/basic-enable-security/build.gradle index a21e3c68d3f..27532cfb7f1 100644 --- a/x-pack/plugin/security/qa/basic-enable-security/build.gradle +++ b/x-pack/plugin/security/qa/basic-enable-security/build.gradle @@ -4,7 +4,6 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') diff --git a/x-pack/plugin/security/qa/security-basic/build.gradle b/x-pack/plugin/security/qa/security-basic/build.gradle index 864a1e51809..e005aeb9e8c 100644 --- a/x-pack/plugin/security/qa/security-basic/build.gradle +++ b/x-pack/plugin/security/qa/security-basic/build.gradle @@ -4,7 +4,6 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') diff --git a/x-pack/plugin/security/qa/tls-basic/build.gradle b/x-pack/plugin/security/qa/tls-basic/build.gradle index 9f5ef26f6e6..78c67daf729 100644 --- a/x-pack/plugin/security/qa/tls-basic/build.gradle +++ b/x-pack/plugin/security/qa/tls-basic/build.gradle @@ -4,7 +4,6 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle index b996f069b4c..c828f7e346b 100644 --- a/x-pack/plugin/sql/build.gradle +++ 
b/x-pack/plugin/sql/build.gradle @@ -38,16 +38,15 @@ task internalClusterTest(type: Test) { check.dependsOn internalClusterTest dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') compileOnly(project(':modules:lang-painless')) { // exclude ASM to not affect featureAware task on Java 10+ exclude group: "org.ow2.asm" } compile project('sql-action') - compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" + compile project(':modules:aggs-matrix-stats') compile "org.antlr:antlr4-runtime:4.5.3" - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(':test:framework') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: ':modules:reindex', configuration: 'runtime') diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index 22186976d6f..fe85cb473e3 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -18,15 +18,15 @@ dependencies { compile (xpackProject('plugin:sql:sql-proto')) { transitive = false } - compile (project(':libs:x-content')) { + compile (project(':libs:elasticsearch-x-content')) { transitive = false } compile (project(':libs:elasticsearch-geo')) { transitive = false } - compile project(':libs:core') + compile project(':libs:elasticsearch-core') runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(":test:framework") testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/sql/qa/build.gradle b/x-pack/plugin/sql/qa/build.gradle index f2a6acd61a0..14bc1faa3fa 100644 --- a/x-pack/plugin/sql/qa/build.gradle +++ b/x-pack/plugin/sql/qa/build.gradle @@ -6,7 +6,7 @@ archivesBaseName = 'qa-sql' group = "org.elasticsearch.x-pack.qa.sql" dependencies { - compile "org.elasticsearch.test:framework:${version}" + compile project(":test:framework") // JDBC testing dependencies compile project(path: xpackModule('sql:jdbc'), configuration: 'nodeps') @@ -59,7 +59,7 @@ subprojects { testCompile(xpackProject('plugin:sql:qa')) { transitive = false } - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(":test:framework") // JDBC testing dependencies testRuntime "net.sourceforge.csvjdbc:csvjdbc:${csvjdbcVersion}" diff --git a/x-pack/plugin/sql/qa/security/build.gradle b/x-pack/plugin/sql/qa/security/build.gradle index a0e6e82ed4d..33a4963c103 100644 --- a/x-pack/plugin/sql/qa/security/build.gradle +++ b/x-pack/plugin/sql/qa/security/build.gradle @@ -1,5 +1,5 @@ dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(':x-pack:plugin:core') } Project mainProject = project @@ -26,7 +26,7 @@ subprojects { } dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(":x-pack:plugin:core") } integTestCluster { diff --git a/x-pack/plugin/sql/sql-action/build.gradle b/x-pack/plugin/sql/sql-action/build.gradle index 86a028186f4..defbf3dac85 100644 --- a/x-pack/plugin/sql/sql-action/build.gradle +++ b/x-pack/plugin/sql/sql-action/build.gradle @@ -13,10 +13,10 @@ dependencies { compile (project(':server')) { transitive = false } - compile 
(project(':libs:core')) { + compile (project(':libs:elasticsearch-core')) { transitive = false } - compile (project(':libs:x-content')) { + compile (project(':libs:elasticsearch-x-content')) { transitive = false } compile xpackProject('plugin:sql:sql-proto') @@ -26,7 +26,7 @@ dependencies { runtime "org.apache.logging.log4j:log4j-api:${versions.log4j}" runtime "org.apache.logging.log4j:log4j-core:${versions.log4j}" - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(":test:framework") } forbiddenApisMain { diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index 6f57ea279c5..927d165c2d2 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -25,10 +25,10 @@ dependencies { compile xpackProject('plugin:sql:sql-client') compile xpackProject('plugin:sql:sql-action') - compile "org.elasticsearch:elasticsearch-cli:${version}" + compile project(":libs:elasticsearch-cli") runtime "org.elasticsearch:jna:${versions.jna}" - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(":test:framework") } dependencyLicenses { diff --git a/x-pack/plugin/sql/sql-client/build.gradle b/x-pack/plugin/sql/sql-client/build.gradle index 613ca73a4db..cc6f097880e 100644 --- a/x-pack/plugin/sql/sql-client/build.gradle +++ b/x-pack/plugin/sql/sql-client/build.gradle @@ -10,7 +10,7 @@ description = 'Code shared between jdbc and cli' dependencies { compile xpackProject('plugin:sql:sql-proto') compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(":test:framework") } dependencyLicenses { diff --git a/x-pack/plugin/sql/sql-proto/build.gradle b/x-pack/plugin/sql/sql-proto/build.gradle index b1c055a0dfc..af890d82968 100644 --- a/x-pack/plugin/sql/sql-proto/build.gradle +++ b/x-pack/plugin/sql/sql-proto/build.gradle @@ -8,15 +8,15 @@ description = 'Request and response objects shared by the cli, jdbc ' + 'and the Elasticsearch plugin' dependencies { - compile (project(':libs:core')) { + compile (project(':libs:elasticsearch-core')) { transitive = false } - compile (project(':libs:x-content')) { + compile (project(':libs:elasticsearch-x-content')) { transitive = false } runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" - testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(":test:framework") } forbiddenApisMain { diff --git a/x-pack/plugin/watcher/build.gradle b/x-pack/plugin/watcher/build.gradle index e236b75ee2c..bfd447adc26 100644 --- a/x-pack/plugin/watcher/build.gradle +++ b/x-pack/plugin/watcher/build.gradle @@ -23,15 +23,13 @@ dependencyLicenses { } dependencies { - compileOnly "org.elasticsearch:elasticsearch:${version}" - - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(':server') compileOnly project(path: xpackModule('core'), configuration: 'default') compileOnly project(path: ':modules:transport-netty4', configuration: 'runtime') compileOnly project(path: ':plugins:transport-nio', configuration: 'runtime') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') - testCompile "org.elasticsearch.plugin:x-pack-ilm:${version}" + testCompile project(xpackModule('ilm')) // watcher deps compile 'com.googlecode.owasp-java-html-sanitizer:owasp-java-html-sanitizer:r239' diff --git a/x-pack/qa/kerberos-tests/build.gradle 
b/x-pack/qa/kerberos-tests/build.gradle index 88248f89b72..8f3268aa9fd 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -12,7 +12,7 @@ testFixtures.useFixture ":test:fixtures:krb5kdc-fixture" integTest.enabled = false dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(':x-pack:plugin:core') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } diff --git a/x-pack/qa/oidc-op-tests/build.gradle b/x-pack/qa/oidc-op-tests/build.gradle index 52e581f60a5..7b053d9da63 100644 --- a/x-pack/qa/oidc-op-tests/build.gradle +++ b/x-pack/qa/oidc-op-tests/build.gradle @@ -5,7 +5,6 @@ apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.test.fixtures' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle index 5305699b9a0..9fc5a9b3b31 100644 --- a/x-pack/qa/openldap-tests/build.gradle +++ b/x-pack/qa/openldap-tests/build.gradle @@ -2,7 +2,6 @@ apply plugin: 'elasticsearch.standalone-test' apply plugin: 'elasticsearch.test.fixtures' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') diff --git a/x-pack/qa/reindex-tests-with-security/build.gradle b/x-pack/qa/reindex-tests-with-security/build.gradle index b0ae65b3448..d831707a996 100644 --- a/x-pack/qa/reindex-tests-with-security/build.gradle +++ b/x-pack/qa/reindex-tests-with-security/build.gradle @@ -4,7 +4,6 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 471503e385d..2f567b5cfdc 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -11,7 +11,7 @@ test.enabled = false dependencies { testCompile project(':x-pack:qa') - testCompile ("org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}") + testCompile project(':client:rest-high-level') } Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> diff --git a/x-pack/qa/security-client-tests/build.gradle b/x-pack/qa/security-client-tests/build.gradle index 556e36e5146..deed4521f7b 100644 --- a/x-pack/qa/security-client-tests/build.gradle +++ b/x-pack/qa/security-client-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - 
testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(xpackModule('core')) testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/qa/security-example-spi-extension/build.gradle b/x-pack/qa/security-example-spi-extension/build.gradle index 1ff65519c36..f997f9cb871 100644 --- a/x-pack/qa/security-example-spi-extension/build.gradle +++ b/x-pack/qa/security-example-spi-extension/build.gradle @@ -8,7 +8,7 @@ esplugin { } dependencies { - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(':x-pack:plugin:core') testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/qa/security-migrate-tests/build.gradle b/x-pack/qa/security-migrate-tests/build.gradle index 1851f0e21b0..d11a5ab2836 100644 --- a/x-pack/qa/security-migrate-tests/build.gradle +++ b/x-pack/qa/security-migrate-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(xpackModule('core')) testCompile project(path: xpackModule('security'), configuration: 'runtime') testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/qa/security-setup-password-tests/build.gradle b/x-pack/qa/security-setup-password-tests/build.gradle index a99fa2d5438..2cca5f40b8c 100644 --- a/x-pack/qa/security-setup-password-tests/build.gradle +++ b/x-pack/qa/security-setup-password-tests/build.gradle @@ -2,7 +2,6 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'runtime') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index e88eac3028f..da448691dfc 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -6,7 +6,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(':x-pack:plugin:core') } String outputDir = "${buildDir}/generated-resources/${project.name}" diff --git a/x-pack/qa/third-party/jira/build.gradle b/x-pack/qa/third-party/jira/build.gradle index 43667300a33..c01f6f129b9 100644 --- a/x-pack/qa/third-party/jira/build.gradle +++ b/x-pack/qa/third-party/jira/build.gradle @@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(':x-pack:plugin:core') testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/third-party/pagerduty/build.gradle b/x-pack/qa/third-party/pagerduty/build.gradle index 9013d8c2815..69c98484470 100644 --- a/x-pack/qa/third-party/pagerduty/build.gradle +++ b/x-pack/qa/third-party/pagerduty/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 
'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(':x-pack:plugin:core') testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/third-party/slack/build.gradle b/x-pack/qa/third-party/slack/build.gradle index 9fdfaeb8266..956631714c0 100644 --- a/x-pack/qa/third-party/slack/build.gradle +++ b/x-pack/qa/third-party/slack/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(':x-pack:plugin:core') testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/transport-client-tests/build.gradle b/x-pack/qa/transport-client-tests/build.gradle index 5ca96eb0d7a..12f0f4dc024 100644 --- a/x-pack/qa/transport-client-tests/build.gradle +++ b/x-pack/qa/transport-client-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(xpackModule('core')) testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/test/feature-aware/build.gradle b/x-pack/test/feature-aware/build.gradle index e3c7ae96063..9d7f1504418 100644 --- a/x-pack/test/feature-aware/build.gradle +++ b/x-pack/test/feature-aware/build.gradle @@ -2,9 +2,9 @@ apply plugin: 'elasticsearch.build' dependencies { compile 'org.ow2.asm:asm:7.1' - compile "org.elasticsearch:elasticsearch:${version}" - compile "org.elasticsearch.plugin:x-pack-core:${version}" - testCompile "org.elasticsearch.test:framework:${version}" + compile project(':server') + compile project(':x-pack:plugin:core') + testCompile project(':test:framework') } forbiddenApisMain.enabled = true diff --git a/x-pack/transport-client/build.gradle b/x-pack/transport-client/build.gradle index d764ef89744..080ad83c495 100644 --- a/x-pack/transport-client/build.gradle +++ b/x-pack/transport-client/build.gradle @@ -8,8 +8,8 @@ archivesBaseName = 'x-pack-transport' dependencies { // this "api" dependency looks weird, but it is correct, as it contains // all of x-pack for now, and transport client will be going away in the future. - compile "org.elasticsearch.plugin:x-pack-core:${version}" - compile "org.elasticsearch.client:transport:${version}" + compile project(xpackModule('core')) + compile project(':client:transport') testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" From 72eb9c2d44ba88a1592918491bd39c41feb84883 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 4 Jun 2019 11:53:26 -0700 Subject: [PATCH 060/210] Eclipse libs projects setup fix (#42852) Fallout from #42773 for eclipse users. 
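
Both the preceding build-file changes and this Eclipse-specific fix follow the same pattern: dependencies expressed as external `group:name:version` coordinates (or as project paths that predate a rename) are replaced with project paths that Gradle resolves inside the same build, which is what the IDE imports expect. A hypothetical build.gradle fragment, not taken from either patch, sketches the shape of the change:

[source,groovy]
--------------------------------------------------
dependencies {
    // before: resolved as a published artifact via group:name:version coordinates,
    // or via a project path that no longer matches the project's current name
    // testCompile "org.elasticsearch.plugin:x-pack-core:${version}"
    // testCompile project(':libs:core')

    // after: resolved as projects in the same Gradle build, so the dependency
    // points at the local sources rather than a published artifact
    testCompile project(':x-pack:plugin:core')
    testCompile project(':libs:elasticsearch-core')
}
--------------------------------------------------
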
(cherry picked from commit 998419c49fe51eb8343664a80f07d8d8d39abc6a) --- libs/core/src/test/eclipse-build.gradle | 2 +- libs/dissect/src/test/eclipse-build.gradle | 2 +- libs/grok/src/test/eclipse-build.gradle | 2 +- libs/nio/src/test/eclipse-build.gradle | 2 +- libs/secure-sm/src/test/eclipse-build.gradle | 2 +- libs/x-content/src/test/eclipse-build.gradle | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/libs/core/src/test/eclipse-build.gradle b/libs/core/src/test/eclipse-build.gradle index b5fe0417428..9fb1d2cac39 100644 --- a/libs/core/src/test/eclipse-build.gradle +++ b/libs/core/src/test/eclipse-build.gradle @@ -2,5 +2,5 @@ apply from: '../../build.gradle' dependencies { - testCompile project(':libs:core') + testCompile project(':libs:elasticsearch-core') } diff --git a/libs/dissect/src/test/eclipse-build.gradle b/libs/dissect/src/test/eclipse-build.gradle index 56d632f23b1..c10fea5e2b7 100644 --- a/libs/dissect/src/test/eclipse-build.gradle +++ b/libs/dissect/src/test/eclipse-build.gradle @@ -3,5 +3,5 @@ apply from: '../../build.gradle' dependencies { - testCompile project(':libs:dissect') + testCompile project(':libs:elasticsearch-dissect') } diff --git a/libs/grok/src/test/eclipse-build.gradle b/libs/grok/src/test/eclipse-build.gradle index c5d791c1663..606aaffa121 100644 --- a/libs/grok/src/test/eclipse-build.gradle +++ b/libs/grok/src/test/eclipse-build.gradle @@ -3,5 +3,5 @@ apply from: '../../build.gradle' dependencies { - testCompile project(':libs:grok') + testCompile project(':libs:elasticsearch-grok') } diff --git a/libs/nio/src/test/eclipse-build.gradle b/libs/nio/src/test/eclipse-build.gradle index e30e76b0da5..f7e152311ad 100644 --- a/libs/nio/src/test/eclipse-build.gradle +++ b/libs/nio/src/test/eclipse-build.gradle @@ -3,5 +3,5 @@ apply from: '../../build.gradle' dependencies { - testCompile project(':libs:nio') + testCompile project(':libs:elasticsearch-nio') } diff --git a/libs/secure-sm/src/test/eclipse-build.gradle b/libs/secure-sm/src/test/eclipse-build.gradle index 56dcdcbac27..63d610b75c1 100644 --- a/libs/secure-sm/src/test/eclipse-build.gradle +++ b/libs/secure-sm/src/test/eclipse-build.gradle @@ -3,5 +3,5 @@ apply from: '../../build.gradle' dependencies { - testCompile project(':libs:secure-sm') + testCompile project(':libs:elasticsearch-secure-sm') } diff --git a/libs/x-content/src/test/eclipse-build.gradle b/libs/x-content/src/test/eclipse-build.gradle index f456f71a4c3..a8d81f9f958 100644 --- a/libs/x-content/src/test/eclipse-build.gradle +++ b/libs/x-content/src/test/eclipse-build.gradle @@ -3,5 +3,5 @@ apply from: '../../build.gradle' dependencies { - testCompile project(':libs:x-content') + testCompile project(':libs:elasticsearch-x-content') } From aad1b3a2a0d825c6ecb7e753772ca8ef55600760 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 4 Jun 2019 18:21:20 -0400 Subject: [PATCH 061/210] Fix version parsing in various tests (#42871) This commit fixes the version parsing in various tests. The issue here is that the parsing was relying on java.version. However, java.version can contain additional characters such as -ea for early access builds. See JEP 233: Name Syntax ------------------------------ -------------- java.version $VNUM(\-$PRE)? java.runtime.version $VSTR java.vm.version $VSTR java.specification.version $VNUM java.vm.specification.version $VNUM Instead, we want java.specification.version. 
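
As a standalone sketch, not part of the patch, the difference between the two properties can be seen by parsing both: on an early-access JDK, `java.version` carries a `-ea` suffix that breaks naive numeric parsing, while `java.specification.version` remains a plain version number.

[source,groovy]
--------------------------------------------------
// Hypothetical illustration: on an early-access build, java.version may be "13-ea",
// while java.specification.version is just "13" (or "12", "11", ...).
def runtime = System.getProperty('java.version')               // e.g. "12.0.1" or "13-ea"
def spec    = System.getProperty('java.specification.version') // e.g. "12" or "13"

// Taking the first dot-separated component as an int throws NumberFormatException
// for "13-ea", but is safe for the specification version, which is purely numeric.
int major = Integer.valueOf(spec.split('\\.')[0])
println "java.version=${runtime}, java.specification.version=${spec}, major=${major}"
--------------------------------------------------
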
--- .../org/elasticsearch/client/RestClientBuilderIntegTests.java | 2 +- .../azure/classic/AzureDiscoveryClusterFormationTests.java | 3 ++- .../xpack/monitoring/exporter/http/HttpExporterSslIT.java | 3 ++- .../xpack/security/authc/saml/SamlRealmTests.java | 3 ++- .../java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java | 3 ++- .../watcher/actions/webhook/WebhookHttpsIntegrationTests.java | 3 ++- .../xpack/watcher/common/http/HttpClientTests.java | 3 ++- 7 files changed, 13 insertions(+), 7 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java index 780cc447ba8..ca6443f6c6b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java @@ -134,7 +134,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase { * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK. */ private static String getProtocol() { - String version = AccessController.doPrivileged((PrivilegedAction) () -> System.getProperty("java.version")); + String version = AccessController.doPrivileged((PrivilegedAction) () -> System.getProperty("java.specification.version")); String[] components = version.split("\\."); if (components.length > 0) { final int major = Integer.valueOf(components[0]); diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java index 8bfb373f644..7f45708c76a 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java @@ -281,7 +281,8 @@ public class AzureDiscoveryClusterFormationTests extends ESIntegTestCase { return "TLSv1.2"; } else { JavaVersion full = - AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + AccessController.doPrivileged( + (PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.specification.version"))); if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { return "TLSv1.2"; } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java index 9e16f669ae8..333388358be 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java @@ -205,7 +205,8 @@ public class HttpExporterSslIT extends MonitoringIntegTestCase { return Collections.singletonList("TLSv1.2"); } else { JavaVersion full = - AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + AccessController.doPrivileged( + (PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.specification.version"))); if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { return Collections.singletonList("TLSv1.2"); } diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java index aea50691119..32e436f1d77 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java @@ -733,7 +733,8 @@ public class SamlRealmTests extends SamlTestCase { return Collections.singletonList("TLSv1.2"); } else { JavaVersion full = - AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + AccessController.doPrivileged( + (PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.specification.version"))); if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { return Collections.singletonList("TLSv1.2"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java index 85f18ddff92..3deee4b68c5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java @@ -213,7 +213,8 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { return XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS; } JavaVersion full = - AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + AccessController.doPrivileged( + (PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.specification.version"))); if (full.compareTo(JavaVersion.parse("11.0.3")) < 0) { return Collections.singletonList("TLSv1.2"); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index d93657acdc0..985e26e7665 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -151,7 +151,8 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest return Collections.singletonList("TLSv1.2"); } else { JavaVersion full = - AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + AccessController.doPrivileged( + (PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.specification.version"))); if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { return Collections.singletonList("TLSv1.2"); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java index 3ae96499b6a..5edcd10935e 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java @@ -760,7 +760,8 @@ public class HttpClientTests extends ESTestCase { return Collections.singletonList("TLSv1.2"); } else { JavaVersion full = - 
AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + AccessController.doPrivileged( + (PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.specification.version"))); if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { return Collections.singletonList("TLSv1.2"); } From 117df87b2b7304ee23a3f86e102f656a6df175fb Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 4 Jun 2019 20:36:24 -0400 Subject: [PATCH 062/210] Replicate aliases in cross-cluster replication (#42875) This commit adds functionality so that aliases that are manipulated on leader indices are replicated by the shard follow tasks to the follower indices. Note that we ignore write indices. This is due to the fact that follower indices do not receive direct writes so the concept is not useful. Relates #41815 --- .../client/ccr/IndicesFollowStats.java | 80 ++-- .../client/ccr/CcrStatsResponseTests.java | 3 + .../client/ccr/FollowStatsResponseTests.java | 2 + .../ccr/apis/follow/get-follow-stats.asciidoc | 5 + .../reference/ccr/apis/get-ccr-stats.asciidoc | 2 + .../cluster/metadata/AliasMetaData.java | 8 +- .../xpack/ccr/ESCCRRestTestCase.java | 5 + .../xpack/ccr/action/ShardChangesAction.java | 40 +- .../xpack/ccr/action/ShardFollowNodeTask.java | 67 ++- .../ccr/action/ShardFollowTasksExecutor.java | 123 ++++++ .../elasticsearch/xpack/ccr/CcrAliasesIT.java | 413 ++++++++++++++++++ .../ccr/action/ShardChangesResponseTests.java | 2 + .../ShardFollowNodeTaskRandomTests.java | 42 +- .../ShardFollowNodeTaskStatusTests.java | 2 + .../ccr/action/ShardFollowNodeTaskTests.java | 154 ++++++- .../ShardFollowTaskReplicationTests.java | 17 +- .../xpack/ccr/action/StatsResponsesTests.java | 1 + .../ccr/FollowStatsMonitoringDocTests.java | 4 + .../core/ccr/ShardFollowNodeTaskStatus.java | 31 +- .../src/main/resources/monitoring-es.json | 3 + .../WaitForFollowShardTasksStepTests.java | 1 + .../indexlifecycle/CCRIndexLifecycleIT.java | 79 +++- 22 files changed, 984 insertions(+), 100 deletions(-) create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrAliasesIT.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/IndicesFollowStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/IndicesFollowStats.java index 7d3af08577b..ae6217d1674 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/IndicesFollowStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/IndicesFollowStats.java @@ -101,6 +101,7 @@ public final class IndicesFollowStats { static final ParseField WRITE_BUFFER_SIZE_IN_BYTES_FIELD = new ParseField("write_buffer_size_in_bytes"); static final ParseField FOLLOWER_MAPPING_VERSION_FIELD = new ParseField("follower_mapping_version"); static final ParseField FOLLOWER_SETTINGS_VERSION_FIELD = new ParseField("follower_settings_version"); + static final ParseField FOLLOWER_ALIASES_VERSION_FIELD = new ParseField("follower_aliases_version"); static final ParseField TOTAL_READ_TIME_MILLIS_FIELD = new ParseField("total_read_time_millis"); static final ParseField TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD = new ParseField("total_read_remote_exec_time_millis"); static final ParseField SUCCESSFUL_READ_REQUESTS_FIELD = new ParseField("successful_read_requests"); @@ -117,41 +118,42 @@ public final class IndicesFollowStats { @SuppressWarnings("unchecked") static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>( - 
"shard-follow-stats", - true, - args -> new ShardFollowStats( - (String) args[0], - (String) args[1], - (String) args[2], - (int) args[3], - (long) args[4], - (long) args[5], - (long) args[6], - (long) args[7], - (long) args[8], - (int) args[9], - (int) args[10], - (int) args[11], - (long) args[12], - (long) args[13], - (long) args[14], - (long) args[15], - (long) args[16], - (long) args[17], - (long) args[18], - (long) args[19], - (long) args[20], - (long) args[21], - (long) args[22], - (long) args[23], - (long) args[24], - (long) args[25], - new TreeMap<>( - ((List>>) args[26]) - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))), - (ElasticsearchException) args[27])); + new ConstructingObjectParser<>( + "shard-follow-stats", + true, + args -> new ShardFollowStats( + (String) args[0], + (String) args[1], + (String) args[2], + (int) args[3], + (long) args[4], + (long) args[5], + (long) args[6], + (long) args[7], + (long) args[8], + (int) args[9], + (int) args[10], + (int) args[11], + (long) args[12], + (long) args[13], + (long) args[14], + (long) args[15], + (long) args[16], + (long) args[17], + (long) args[18], + (long) args[19], + (long) args[20], + (long) args[21], + (long) args[22], + (long) args[23], + (long) args[24], + (long) args[25], + (long) args[26], + new TreeMap<>( + ((List>>) args[27]) + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))), + (ElasticsearchException) args[28])); static final ConstructingObjectParser>, Void> READ_EXCEPTIONS_ENTRY_PARSER = new ConstructingObjectParser<>( @@ -175,6 +177,7 @@ public final class IndicesFollowStats { PARSER.declareLong(ConstructingObjectParser.constructorArg(), WRITE_BUFFER_SIZE_IN_BYTES_FIELD); PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_MAPPING_VERSION_FIELD); PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_SETTINGS_VERSION_FIELD); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_ALIASES_VERSION_FIELD); PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_TIME_MILLIS_FIELD); PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SUCCESSFUL_READ_REQUESTS_FIELD); @@ -220,6 +223,7 @@ public final class IndicesFollowStats { private final long writeBufferSizeInBytes; private final long followerMappingVersion; private final long followerSettingsVersion; + private final long followerAliasesVersion; private final long totalReadTimeMillis; private final long totalReadRemoteExecTimeMillis; private final long successfulReadRequests; @@ -249,6 +253,7 @@ public final class IndicesFollowStats { long writeBufferSizeInBytes, long followerMappingVersion, long followerSettingsVersion, + long followerAliasesVersion, long totalReadTimeMillis, long totalReadRemoteExecTimeMillis, long successfulReadRequests, @@ -277,6 +282,7 @@ public final class IndicesFollowStats { this.writeBufferSizeInBytes = writeBufferSizeInBytes; this.followerMappingVersion = followerMappingVersion; this.followerSettingsVersion = followerSettingsVersion; + this.followerAliasesVersion = followerAliasesVersion; this.totalReadTimeMillis = totalReadTimeMillis; this.totalReadRemoteExecTimeMillis = totalReadRemoteExecTimeMillis; this.successfulReadRequests = successfulReadRequests; @@ -352,6 +358,10 @@ public final class IndicesFollowStats { return followerSettingsVersion; } + public long getFollowerAliasesVersion() { + 
return followerAliasesVersion; + } + public long getTotalReadTimeMillis() { return totalReadTimeMillis; } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java index d56b762520c..eaf6103a0ec 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java @@ -106,6 +106,7 @@ public class CcrStatsResponseTests extends AbstractResponseTestCase implements To if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - AliasMetaData that = (AliasMetaData) o; + final AliasMetaData that = (AliasMetaData) o; if (alias != null ? !alias.equals(that.alias) : that.alias != null) return false; if (filter != null ? !filter.equals(that.filter) : that.filter != null) return false; if (indexRouting != null ? !indexRouting.equals(that.indexRouting) : that.indexRouting != null) return false; - if (searchRouting != null ? !searchRouting.equals(that.searchRouting) : that.searchRouting != null) - return false; - if (writeIndex != null ? writeIndex != that.writeIndex : that.writeIndex != null) - return false; + if (searchRouting != null ? !searchRouting.equals(that.searchRouting) : that.searchRouting != null) return false; + if (writeIndex != null ? writeIndex != that.writeIndex : that.writeIndex != null) return false; return true; } diff --git a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java index 33e7c2f2bf1..b555e5f4411 100644 --- a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java +++ b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java @@ -143,6 +143,7 @@ public class ESCCRRestTestCase extends ESRestTestCase { int followerMaxSeqNo = 0; int followerMappingVersion = 0; int followerSettingsVersion = 0; + int followerAliasesVersion = 0; List hits = (List) XContentMapValues.extractValue("hits.hits", response); assertThat(hits.size(), greaterThanOrEqualTo(1)); @@ -164,11 +165,15 @@ public class ESCCRRestTestCase extends ESRestTestCase { int foundFollowerSettingsVersion = (int) XContentMapValues.extractValue("_source.ccr_stats.follower_settings_version", hit); followerSettingsVersion = Math.max(followerSettingsVersion, foundFollowerSettingsVersion); + int foundFollowerAliasesVersion = + (int) XContentMapValues.extractValue("_source.ccr_stats.follower_aliases_version", hit); + followerAliasesVersion = Math.max(followerAliasesVersion, foundFollowerAliasesVersion); } assertThat(followerMaxSeqNo, greaterThan(0)); assertThat(followerMappingVersion, greaterThan(0)); assertThat(followerSettingsVersion, greaterThan(0)); + assertThat(followerAliasesVersion, greaterThan(0)); } protected static void verifyAutoFollowMonitoring() throws IOException { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java index 33b8a274431..521a1eaae82 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java @@ -7,6 +7,7 @@ package 
org.elasticsearch.xpack.ccr.action; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; @@ -219,6 +220,12 @@ public class ShardChangesAction extends Action { return settingsVersion; } + private long aliasesVersion; + + public long getAliasesVersion() { + return aliasesVersion; + } + private long globalCheckpoint; public long getGlobalCheckpoint() { @@ -256,6 +263,11 @@ public class ShardChangesAction extends Action { super(in); mappingVersion = in.readVLong(); settingsVersion = in.readVLong(); + if (in.getVersion().onOrAfter(Version.V_7_3_0)) { + aliasesVersion = in.readVLong(); + } else { + aliasesVersion = 0; + } globalCheckpoint = in.readZLong(); maxSeqNo = in.readZLong(); maxSeqNoOfUpdatesOrDeletes = in.readZLong(); @@ -264,16 +276,17 @@ public class ShardChangesAction extends Action { } Response( - final long mappingVersion, - final long settingsVersion, - final long globalCheckpoint, - final long maxSeqNo, - final long maxSeqNoOfUpdatesOrDeletes, - final Translog.Operation[] operations, - final long tookInMillis) { - + final long mappingVersion, + final long settingsVersion, + final long aliasesVersion, + final long globalCheckpoint, + final long maxSeqNo, + final long maxSeqNoOfUpdatesOrDeletes, + final Translog.Operation[] operations, + final long tookInMillis) { this.mappingVersion = mappingVersion; this.settingsVersion = settingsVersion; + this.aliasesVersion = aliasesVersion; this.globalCheckpoint = globalCheckpoint; this.maxSeqNo = maxSeqNo; this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes; @@ -291,6 +304,9 @@ public class ShardChangesAction extends Action { super.writeTo(out); out.writeVLong(mappingVersion); out.writeVLong(settingsVersion); + if (out.getVersion().onOrAfter(Version.V_7_3_0)) { + out.writeVLong(aliasesVersion); + } out.writeZLong(globalCheckpoint); out.writeZLong(maxSeqNo); out.writeZLong(maxSeqNoOfUpdatesOrDeletes); @@ -305,6 +321,7 @@ public class ShardChangesAction extends Action { final Response that = (Response) o; return mappingVersion == that.mappingVersion && settingsVersion == that.settingsVersion && + aliasesVersion == that.aliasesVersion && globalCheckpoint == that.globalCheckpoint && maxSeqNo == that.maxSeqNo && maxSeqNoOfUpdatesOrDeletes == that.maxSeqNoOfUpdatesOrDeletes && @@ -317,6 +334,7 @@ public class ShardChangesAction extends Action { return Objects.hash( mappingVersion, settingsVersion, + aliasesVersion, globalCheckpoint, maxSeqNo, maxSeqNoOfUpdatesOrDeletes, @@ -361,9 +379,11 @@ public class ShardChangesAction extends Action { final IndexMetaData indexMetaData = indexService.getMetaData(); final long mappingVersion = indexMetaData.getMappingVersion(); final long settingsVersion = indexMetaData.getSettingsVersion(); + final long aliasesVersion = indexMetaData.getAliasesVersion(); return getResponse( mappingVersion, settingsVersion, + aliasesVersion, seqNoStats, maxSeqNoOfUpdatesOrDeletes, operations, @@ -436,12 +456,14 @@ public class ShardChangesAction extends Action { final long mappingVersion = indexMetaData.getMappingVersion(); final long settingsVersion = indexMetaData.getSettingsVersion(); + final long aliasesVersion = indexMetaData.getAliasesVersion(); final SeqNoStats latestSeqNoStats = indexShard.seqNoStats(); final long maxSeqNoOfUpdatesOrDeletes = 
indexShard.getMaxSeqNoOfUpdatesOrDeletes(); listener.onResponse( getResponse( mappingVersion, settingsVersion, + aliasesVersion, latestSeqNoStats, maxSeqNoOfUpdatesOrDeletes, EMPTY_OPERATIONS_ARRAY, @@ -541,6 +563,7 @@ public class ShardChangesAction extends Action { static Response getResponse( final long mappingVersion, final long settingsVersion, + final long aliasesVersion, final SeqNoStats seqNoStats, final long maxSeqNoOfUpdates, final Translog.Operation[] operations, @@ -550,6 +573,7 @@ public class ShardChangesAction extends Action { return new Response( mappingVersion, settingsVersion, + aliasesVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), maxSeqNoOfUpdates, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 0ee86a6058c..4ad0fb1dfd0 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -78,6 +78,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { private int numOutstandingWrites = 0; private long currentMappingVersion = 0; private long currentSettingsVersion = 0; + private long currentAliasesVersion = 0; private long totalReadRemoteExecTimeMillis = 0; private long totalReadTimeMillis = 0; private long successfulReadRequests = 0; @@ -154,15 +155,27 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { synchronized (ShardFollowNodeTask.this) { currentSettingsVersion = leaderSettingsVersion; } + }); + updateAliases(leaderAliasesVersion -> { + synchronized (ShardFollowNodeTask.this) { + currentAliasesVersion = leaderAliasesVersion; + } + }); + synchronized (ShardFollowNodeTask.this) { LOGGER.info( - "{} following leader shard {}, follower global checkpoint=[{}], mapping version=[{}], settings version=[{}]", + "{} following leader shard {}, " + + "follower global checkpoint=[{}], " + + "mapping version=[{}], " + + "settings version=[{}], " + + "aliases version=[{}]", params.getFollowShardId(), params.getLeaderShardId(), followerGlobalCheckpoint, - leaderMappingVersion, - leaderSettingsVersion); - coordinateReads(); - }); + currentMappingVersion, + currentSettingsVersion, + currentAliasesVersion); + } + coordinateReads(); }); } @@ -306,12 +319,14 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { // In order to process this read response (3), we need to check and potentially update the follow index's setting (1) and // check and potentially update the follow index's mappings (2). 
- // 3) handle read response: + // 4) handle read response: Runnable handleResponseTask = () -> innerHandleReadResponse(from, maxRequiredSeqNo, response); - // 2) update follow index mapping: + // 3) update follow index mapping: Runnable updateMappingsTask = () -> maybeUpdateMapping(response.getMappingVersion(), handleResponseTask); - // 1) update follow index settings: - maybeUpdateSettings(response.getSettingsVersion(), updateMappingsTask); + // 2) update follow index settings: + Runnable updateSettingsTask = () -> maybeUpdateSettings(response.getSettingsVersion(), updateMappingsTask); + // 1) update follow index aliases: + maybeUpdateAliases(response.getAliasesVersion(), updateSettingsTask); } void handleFallenBehindLeaderShard(Exception e, long from, int maxOperationCount, long maxRequiredSeqNo, AtomicInteger retryCounter) { @@ -423,7 +438,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { private synchronized void maybeUpdateSettings(final Long minimumRequiredSettingsVersion, Runnable task) { if (currentSettingsVersion >= minimumRequiredSettingsVersion) { - LOGGER.trace("{} settings version [{}] is higher or equal than minimum required mapping version [{}]", + LOGGER.trace("{} settings version [{}] is higher or equal than minimum required settings version [{}]", params.getFollowShardId(), currentSettingsVersion, minimumRequiredSettingsVersion); task.run(); } else { @@ -436,6 +451,27 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { } } + private synchronized void maybeUpdateAliases(final Long minimumRequiredAliasesVersion, final Runnable task) { + if (currentAliasesVersion >= minimumRequiredAliasesVersion) { + LOGGER.trace( + "{} aliases version [{}] is higher or equal than minimum required aliases version [{}]", + params.getFollowShardId(), + currentAliasesVersion, + minimumRequiredAliasesVersion); + task.run(); + } else { + LOGGER.trace( + "{} updating aliases, aliases version [{}] is lower than minimum required aliases version [{}]", + params.getFollowShardId(), + currentAliasesVersion, + minimumRequiredAliasesVersion); + updateAliases(aliasesVersion -> { + currentAliasesVersion = aliasesVersion; + task.run(); + }); + } + } + private void updateMapping(long minRequiredMappingVersion, LongConsumer handler) { updateMapping(minRequiredMappingVersion, handler, new AtomicInteger(0)); } @@ -453,6 +489,14 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { innerUpdateSettings(handler, e -> handleFailure(e, retryCounter, () -> updateSettings(handler, retryCounter))); } + private void updateAliases(final LongConsumer handler) { + updateAliases(handler, new AtomicInteger()); + } + + private void updateAliases(final LongConsumer handler, final AtomicInteger retryCounter) { + innerUpdateAliases(handler, e -> handleFailure(e, retryCounter, () -> updateAliases(handler, retryCounter))); + } + private void handleFailure(Exception e, AtomicInteger retryCounter, Runnable task) { assert e != null; if (shouldRetry(params.getRemoteCluster(), e)) { @@ -511,6 +555,8 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { protected abstract void innerUpdateSettings(LongConsumer handler, Consumer errorHandler); + protected abstract void innerUpdateAliases(LongConsumer handler, Consumer errorHandler); + protected abstract void innerSendBulkShardOperationsRequest(String followerHistoryUUID, List operations, long leaderMaxSeqNoOfUpdatesOrDeletes, @@ -566,6 +612,7 @@ public abstract class 
ShardFollowNodeTask extends AllocatedPersistentTask { bufferSizeInBytes, currentMappingVersion, currentSettingsVersion, + currentAliasesVersion, totalReadTimeMillis, totalReadRemoteExecTimeMillis, successfulReadRequests, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index 595303d0bce..fee95e44828 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ccr.action; +import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; @@ -14,6 +15,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -26,6 +28,7 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -62,7 +65,9 @@ import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsAction; import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsRequest; import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsResponse; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -200,6 +205,124 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor errorHandler) { + /* + * The strategy for updating the aliases is fairly simple. We look at the aliases that exist on the leader, and those that + * exist on the follower. We partition these aliases into three sets: the aliases that exist on both the leader and the + * follower, the aliases that are on the leader only, and the aliases that are on the follower only. + * + * For the aliases that are on the leader and the follower, we compare the aliases and add an action to overwrite the + * follower view of the alias if the aliases are different. If the aliases are the same, we skip the alias. Note that the + * meaning of equals here intentionally ignores the write index. There are two reasons for this. First, follower indices + * do not receive direct writes so conceptually the write index is not useful. Additionally, there is a larger challenge. + * Suppose that we did copy over the write index from the leader to the follower. On the leader, when the write index is + * swapped from one index to another, this is done atomically. 
However, to do this on the follower, we would have to step + * outside the shard follow tasks framework and have a separate framework for copying aliases over. This is because if we + * try to manage the aliases including the write aliases with the shard follow tasks, we do not have a way to move the write + * index atomically (since we have a single-index view here only) and therefore we can end up in situations where we would + * try to assign the write index to two indices. Further, trying to do this outside the shard follow tasks framework has + * problems too, since it could be that the new aliases arrive on the coordinator before the write index has even been + * created on the local cluster. So there are race conditions either way. All of this put together means that we will simply + * ignore the write index. + * + * For aliases that are on the leader but not the follower, we copy those aliases over to the follower. + * + * For aliases that are on the follower but not the leader, we remove those aliases from the follower. + */ + final Index leaderIndex = params.getLeaderShardId().getIndex(); + final Index followerIndex = params.getFollowShardId().getIndex(); + + final ClusterStateRequest clusterStateRequest = CcrRequests.metaDataRequest(leaderIndex.getName()); + + final CheckedConsumer onResponse = clusterStateResponse -> { + final IndexMetaData leaderIndexMetaData = clusterStateResponse.getState().metaData().getIndexSafe(leaderIndex); + final IndexMetaData followerIndexMetaData = clusterService.state().metaData().getIndexSafe(followerIndex); + + // partition the aliases into the three sets + final HashSet aliasesOnLeaderNotOnFollower = new HashSet<>(); + final HashSet aliasesInCommon = new HashSet<>(); + final HashSet aliasesOnFollowerNotOnLeader = new HashSet<>(); + + for (final ObjectCursor aliasName : leaderIndexMetaData.getAliases().keys()) { + if (followerIndexMetaData.getAliases().containsKey(aliasName.value)) { + aliasesInCommon.add(aliasName.value); + } else { + aliasesOnLeaderNotOnFollower.add(aliasName.value); + } + } + + for (final ObjectCursor aliasName : followerIndexMetaData.getAliases().keys()) { + if (leaderIndexMetaData.getAliases().containsKey(aliasName.value)) { + assert aliasesInCommon.contains(aliasName.value) : aliasName.value; + } else { + aliasesOnFollowerNotOnLeader.add(aliasName.value); + } + } + + final List aliasActions = new ArrayList<>(); + + // add the aliases the follower does not have + for (final String aliasName : aliasesOnLeaderNotOnFollower) { + final AliasMetaData alias = leaderIndexMetaData.getAliases().get(aliasName); + // we intentionally override that the alias is not a write alias as follower indices do not receive direct writes + aliasActions.add(IndicesAliasesRequest.AliasActions.add() + .index(followerIndex.getName()) + .alias(alias.alias()) + .filter(alias.filter() == null ? 
null : alias.filter().toString()) + .indexRouting(alias.indexRouting()) + .searchRouting(alias.searchRouting()) + .writeIndex(false)); + } + + // update the aliases that are different (ignoring write aliases) + for (final String aliasName : aliasesInCommon) { + final AliasMetaData leaderAliasMetaData = leaderIndexMetaData.getAliases().get(aliasName); + // we intentionally override that the alias is not a write alias as follower indices do not receive direct writes + final AliasMetaData leaderAliasMetaDataWithoutWriteIndex = new AliasMetaData.Builder(aliasName) + .filter(leaderAliasMetaData.filter()) + .indexRouting(leaderAliasMetaData.indexRouting()) + .searchRouting(leaderAliasMetaData.searchRouting()) + .writeIndex(false) + .build(); + final AliasMetaData followerAliasMetaData = followerIndexMetaData.getAliases().get(aliasName); + if (leaderAliasMetaDataWithoutWriteIndex.equals(followerAliasMetaData)) { + // skip this alias, the leader and follower have the same modulo the write index + continue; + } + // we intentionally override that the alias is not a write alias as follower indices do not receive direct writes + aliasActions.add(IndicesAliasesRequest.AliasActions.add() + .index(followerIndex.getName()) + .alias(leaderAliasMetaData.alias()) + .filter(leaderAliasMetaData.filter() == null ? null : leaderAliasMetaData.filter().toString()) + .indexRouting(leaderAliasMetaData.indexRouting()) + .searchRouting(leaderAliasMetaData.searchRouting()) + .writeIndex(false)); + } + + // remove aliases that the leader no longer has + for (final String aliasName : aliasesOnFollowerNotOnLeader) { + aliasActions.add(IndicesAliasesRequest.AliasActions.remove().index(followerIndex.getName()).alias(aliasName)); + } + + final IndicesAliasesRequest request = new IndicesAliasesRequest(); + if (aliasActions.isEmpty()) { + handler.accept(leaderIndexMetaData.getAliasesVersion()); + } else { + aliasActions.forEach(request::addAliasAction); + followerClient.admin().indices().aliases( + request, + ActionListener.wrap(r -> handler.accept(leaderIndexMetaData.getAliasesVersion()), errorHandler)); + } + }; + + try { + remoteClient(params).admin().cluster().state(clusterStateRequest, ActionListener.wrap(onResponse, errorHandler)); + } catch (final NoSuchRemoteClusterException e) { + errorHandler.accept(e); + } + } + private void closeIndexUpdateSettingsAndOpenIndex(String followIndex, Settings updatedSettings, Runnable handler, diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrAliasesIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrAliasesIT.java new file mode 100644 index 00000000000..e1e955470e4 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrAliasesIT.java @@ -0,0 +1,413 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ccr; + +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.common.CheckedBiConsumer; +import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.CheckedRunnable; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.xpack.CcrIntegTestCase; +import org.elasticsearch.xpack.ccr.action.ShardFollowTask; +import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class CcrAliasesIT extends CcrIntegTestCase { + + public void testAliasOnIndexCreation() throws Exception { + final String aliasName = randomAlphaOfLength(16); + final String aliases; + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + { + builder.startObject("aliases"); + { + builder.startObject(aliasName); + { + + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + aliases = BytesReference.bytes(builder).utf8ToString(); + } + assertAcked(leaderClient().admin().indices().prepareCreate("leader").setSource(aliases, XContentType.JSON)); + final PutFollowAction.Request followRequest = putFollow("leader", "follower"); + followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); + + ensureFollowerGreen(true, "follower"); + + // wait for the shard follow task to exist + assertBusy(() -> assertShardFollowTask(1)); + + assertAliasesExist("leader", "follower", aliasName); + } + + public void testAddAlias() throws Exception { + runAddAliasTest(null); + } + + public void testAddExplicitNotWriteAlias() throws Exception { + runAddAliasTest(false); + } + + public void testWriteAliasIsIgnored() throws Exception { + runAddAliasTest(true); + } + + private void runAddAliasTest(final Boolean isWriteAlias) throws Exception { + runAddAliasTest(isWriteAlias, aliasName -> {}); + } + + /** + * Runs an add alias test which adds a random alias to the leader exist, and then asserts that the alias is replicated to the 
follower. + * The specified post assertions gives the caller the opportunity to add additional assertions on the alias that is added. These + * assertions are executed after all other assertions that the alias exists. + * + * @param isWriteIndex whether or not the leader index is the write index for the alias + * @param postAssertions the post assertions to execute + * @param the type of checked exception the post assertions callback can throw + * @throws Exception if a checked exception is thrown while executing the add alias test + */ + private void runAddAliasTest( + final Boolean isWriteIndex, + final CheckedConsumer postAssertions) throws Exception { + assertAcked(leaderClient().admin().indices().prepareCreate("leader")); + final PutFollowAction.Request followRequest = putFollow("leader", "follower"); + // we set a low poll timeout so that shard changes requests are responded to quickly even without indexing + followRequest.getParameters().setReadPollTimeout(TimeValue.timeValueMillis(100)); + followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); + + ensureFollowerGreen(true, "follower"); + + assertBusy(() -> assertShardFollowTask(1)); + + final String aliasName = randomAlphaOfLength(16); + addRandomAlias("leader", aliasName, isWriteIndex); + + assertAliasesExist("leader", "follower", aliasName); + + postAssertions.accept(aliasName); + } + + private void addRandomAlias(final String index, final String aliasName, final Boolean isWriteIndex) { + final IndicesAliasesRequest.AliasActions add = IndicesAliasesRequest.AliasActions.add(); + add.index(index); + add.alias(aliasName); + add.writeIndex(isWriteIndex); + if (randomBoolean()) { + add.routing(randomAlphaOfLength(16)); + } else { + if (randomBoolean()) { + add.indexRouting(randomAlphaOfLength(16)); + } + if (randomBoolean()) { + add.searchRouting(randomAlphaOfLength(16)); + } + } + if (randomBoolean()) { + add.filter(termQuery(randomAlphaOfLength(16), randomAlphaOfLength(16))); + } + + assertAcked(leaderClient().admin().indices().prepareAliases().addAliasAction(add)); + } + + public void testAddMultipleAliasesAtOnce() throws Exception { + assertAcked(leaderClient().admin().indices().prepareCreate("leader")); + final PutFollowAction.Request followRequest = putFollow("leader", "follower"); + // we set a low poll timeout so that shard changes requests are responded to quickly even without indexing + followRequest.getParameters().setReadPollTimeout(TimeValue.timeValueMillis(100)); + followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); + + ensureFollowerGreen(true, "follower"); + + assertBusy(() -> assertShardFollowTask(1)); + + final int numberOfAliases = randomIntBetween(2, 8); + final IndicesAliasesRequestBuilder builder = leaderClient().admin().indices().prepareAliases(); + for (int i = 0; i < numberOfAliases; i++) { + builder.addAlias("leader", "alias_" + i); + } + assertAcked(builder); + + final String[] aliases = new String[numberOfAliases]; + for (int i = 0; i < numberOfAliases; i++) { + aliases[i] = "alias_" + i; + } + assertAliasesExist("leader", "follower", aliases); + } + + public void testAddMultipleAliasesSequentially() throws Exception { + assertAcked(leaderClient().admin().indices().prepareCreate("leader")); + final PutFollowAction.Request followRequest = putFollow("leader", "follower"); + // we set a low poll timeout so that shard changes requests are responded to quickly even without indexing + followRequest.getParameters().setReadPollTimeout(TimeValue.timeValueMillis(100)); + 
followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); + + ensureFollowerGreen(true, "follower"); + + assertBusy(() -> assertShardFollowTask(1)); + + final int numberOfAliases = randomIntBetween(2, 8); + for (int i = 0; i < numberOfAliases; i++) { + assertAcked(leaderClient().admin().indices().prepareAliases().addAlias("leader", "alias_" + i)); + + final String[] aliases = new String[i + 1]; + for (int j = 0; j < i + 1; j++) { + aliases[j] = "alias_" + j; + } + assertAliasesExist("leader", "follower", aliases); + } + } + + public void testUpdateExistingAlias() throws Exception { + runAddAliasTest( + null, + /* + * After the alias is added (via runAddAliasTest) we modify the alias in place, and then assert that the modification is + * eventually replicated. + */ + aliasName -> { + assertAcked(leaderClient().admin() + .indices() + .prepareAliases() + .addAlias("leader", aliasName, termQuery(randomAlphaOfLength(16), randomAlphaOfLength(16)))); + assertAliasesExist("leader", "follower", aliasName); + }); + } + + public void testRemoveExistingAlias() throws Exception { + runAddAliasTest( + false, + aliasName -> { + removeAlias(aliasName); + assertAliasExistence(aliasName, false); + } + ); + } + + private void removeAlias(final String aliasName) { + assertAcked(leaderClient().admin().indices().prepareAliases().removeAlias("leader", aliasName)); + } + + public void testStress() throws Exception { + assertAcked(leaderClient().admin().indices().prepareCreate("leader")); + final PutFollowAction.Request followRequest = putFollow("leader", "follower"); + // we set a low poll timeout so that shard changes requests are responded to quickly even without indexing + followRequest.getParameters().setReadPollTimeout(TimeValue.timeValueMillis(100)); + followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); + + final int numberOfThreads = randomIntBetween(2, 4); + final int numberOfIterations = randomIntBetween(4, 32); + final CyclicBarrier barrier = new CyclicBarrier(numberOfThreads + 1); + final List threads = new ArrayList<>(numberOfThreads); + + for (int i = 0; i < numberOfThreads; i++) { + final Thread thread = new Thread(() -> { + try { + barrier.await(); + } catch (final BrokenBarrierException | InterruptedException e) { + throw new RuntimeException(e); + } + for (int j = 0; j < numberOfIterations; j++) { + final String action = randomFrom("create", "update", "delete"); + switch (action) { + case "create": + addRandomAlias("leader", randomAlphaOfLength(16), randomFrom(new Boolean[] { null, false, true })); + break; + case "update": + try { + final String[] aliases = getAliasesOnLeader(); + if (aliases.length == 0) { + continue; + } + final String alias = randomFrom(aliases); + /* + * Add an alias with the same name, which acts as an update (although another thread could concurrently + * remove). 
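+                             * Either interleaving is acceptable: once the threads join, the final assertAliasesExist call
+                             * only requires that the follower eventually converge on the leader's final set of aliases.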
+ */ + addRandomAlias("leader", alias, randomFrom(new Boolean[] { null, false, true })); + } catch (final Exception e) { + throw new RuntimeException(e); + } + break; + case "delete": + try { + final String[] aliases = getAliasesOnLeader(); + if (aliases.length == 0) { + continue; + } + final String alias = randomFrom(aliases); + try { + removeAlias(alias); + } catch (final AliasesNotFoundException e) { + // ignore, it could have been deleted by another thread + continue; + } + } catch (final Exception e) { + throw new RuntimeException(e); + } + break; + default: + assert false : action; + } + } + try { + barrier.await(); + } catch (final BrokenBarrierException | InterruptedException e) { + throw new RuntimeException(e); + } + }); + thread.start(); + threads.add(thread); + } + barrier.await(); + + barrier.await(); + + for (final Thread thread : threads) { + thread.join(); + } + + assertAliasesExist("leader", "follower", getAliasesOnLeader()); + } + + private String[] getAliasesOnLeader() throws InterruptedException, ExecutionException { + final GetAliasesResponse response = leaderClient().admin().indices().getAliases(new GetAliasesRequest().indices("leader")).get(); + return response.getAliases().get("leader").stream().map(AliasMetaData::alias).toArray(String[]::new); + } + + private void assertAliasesExist(final String leaderIndex, final String followerIndex, final String... aliases) throws Exception { + assertAliasesExist(leaderIndex, followerIndex, (alias, aliasMetaData) -> {}, aliases); + } + + private void assertAliasesExist( + final String leaderIndex, + final String followerIndex, + final CheckedBiConsumer aliasMetaDataAssertion, + final String... aliases) throws Exception { + // we must check serially because aliases exist will return true if any but not necessarily all of the requested aliases exist + for (final String alias : aliases) { + assertAliasExistence(alias, true); + } + + assertBusy(() -> { + final GetAliasesResponse followerResponse = + followerClient().admin().indices().getAliases(new GetAliasesRequest().indices(followerIndex)).get(); + assertThat( + "expected follower to have [" + aliases.length + "] aliases, but was " + followerResponse.getAliases().toString(), + followerResponse.getAliases().get(followerIndex), + hasSize(aliases.length)); + for (final String alias : aliases) { + final AliasMetaData followerAliasMetaData = getAliasMetaData(followerResponse, followerIndex, alias); + + final GetAliasesResponse leaderResponse = + leaderClient().admin().indices().getAliases(new GetAliasesRequest().indices(leaderIndex).aliases(alias)).get(); + final AliasMetaData leaderAliasMetaData = getAliasMetaData(leaderResponse, leaderIndex, alias); + + assertThat( + "alias [" + alias + "] index routing did not replicate, but was " + followerAliasMetaData.toString(), + followerAliasMetaData.indexRouting(), equalTo(leaderAliasMetaData.indexRouting())); + assertThat( + "alias [" + alias + "] search routing did not replicate, but was " + followerAliasMetaData.toString(), + followerAliasMetaData.searchRoutingValues(), equalTo(leaderAliasMetaData.searchRoutingValues())); + assertThat( + "alias [" + alias + "] filtering did not replicate, but was " + followerAliasMetaData.toString(), + followerAliasMetaData.filter(), equalTo(leaderAliasMetaData.filter())); + assertThat( + "alias [" + alias + "] should not be a write index, but was " + followerAliasMetaData.toString(), + followerAliasMetaData.writeIndex(), + equalTo(false)); + aliasMetaDataAssertion.accept(alias, followerAliasMetaData); + } 
+ }); + } + + private void assertAliasExistence(final String alias, final boolean exists) throws Exception { + assertBusy(() -> { + // we must check serially because aliases exist will return true if any but not necessarily all of the requested aliases exist + final AliasesExistResponse response = followerClient().admin() + .indices() + .aliasesExist(new GetAliasesRequest().indices("follower").aliases(alias)) + .get(); + if (exists) { + assertTrue("alias [" + alias + "] did not exist", response.exists()); + } else { + assertFalse("alias [" + alias + "] exists", response.exists()); + } + }); + } + + private AliasMetaData getAliasMetaData(final GetAliasesResponse response, final String index, final String alias) { + final Optional maybeAliasMetaData = + response.getAliases().get(index).stream().filter(a -> a.getAlias().equals(alias)).findFirst(); + assertTrue("alias [" + alias + "] did not exist", maybeAliasMetaData.isPresent()); + return maybeAliasMetaData.get(); + } + + private CheckedRunnable assertShardFollowTask(final int numberOfPrimaryShards) { + return () -> { + final ClusterState clusterState = followerClient().admin().cluster().prepareState().get().getState(); + final PersistentTasksCustomMetaData taskMetadata = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + assertNotNull("task metadata for follower should exist", taskMetadata); + + final ListTasksRequest listTasksRequest = new ListTasksRequest(); + listTasksRequest.setDetailed(true); + listTasksRequest.setActions(ShardFollowTask.NAME + "[c]"); + final ListTasksResponse listTasksResponse = followerClient().admin().cluster().listTasks(listTasksRequest).actionGet(); + assertThat("expected no node failures", listTasksResponse.getNodeFailures().size(), equalTo(0)); + assertThat("expected no task failures", listTasksResponse.getTaskFailures().size(), equalTo(0)); + + final List taskInfos = listTasksResponse.getTasks(); + assertThat("expected a task for each shard", taskInfos.size(), equalTo(numberOfPrimaryShards)); + final Collection> shardFollowTasks = + taskMetadata.findTasks(ShardFollowTask.NAME, Objects::nonNull); + for (final PersistentTasksCustomMetaData.PersistentTask shardFollowTask : shardFollowTasks) { + TaskInfo taskInfo = null; + final String expectedId = "id=" + shardFollowTask.getId(); + for (final TaskInfo info : taskInfos) { + if (expectedId.equals(info.getDescription())) { + taskInfo = info; + break; + } + } + assertNotNull("task info for shard follow task [" + expectedId + "] should exist", taskInfo); + } + }; + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java index a5b28caf9df..9c785ecd22c 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java @@ -15,6 +15,7 @@ public class ShardChangesResponseTests extends AbstractWireSerializingTestCase fromToSlot = new HashMap<>(); @Override @@ -122,6 +127,11 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { handler.accept(settingsVersion); } + @Override + protected void innerUpdateAliases(LongConsumer handler, Consumer errorHandler) { + handler.accept(aliasesVersion); + } + @Override protected void innerSendBulkShardOperationsRequest( String followerHistoryUUID, List operations, @@ -172,8 +182,15 @@ public class 
ShardFollowNodeTaskRandomTests extends ESTestCase { assert from >= testRun.finalExpectedGlobalCheckpoint; final long globalCheckpoint = tracker.getCheckpoint(); final long maxSeqNo = tracker.getMaxSeqNo(); - handler.accept(new ShardChangesAction.Response(0L, 0L, globalCheckpoint, maxSeqNo, randomNonNegativeLong(), - new Translog.Operation[0], 1L)); + handler.accept(new ShardChangesAction.Response( + 0L, + 0L, + 0L, + globalCheckpoint, + maxSeqNo, + randomNonNegativeLong(), + new Translog.Operation[0], + 1L)); } }; threadPool.generic().execute(task); @@ -233,10 +250,16 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { }; } - private static TestRun createTestRun(long startSeqNo, long startMappingVersion, long startSettingsVersion, int maxOperationCount) { + private static TestRun createTestRun( + final long startSeqNo, + final long startMappingVersion, + final long startSettingsVersion, + final long startAliasesVersion, + final int maxOperationCount) { long prevGlobalCheckpoint = startSeqNo; long mappingVersion = startMappingVersion; long settingsVersion = startSettingsVersion; + long aliasesVersion = startAliasesVersion; int numResponses = randomIntBetween(16, 256); Map> responses = new HashMap<>(numResponses); for (int i = 0; i < numResponses; i++) { @@ -247,7 +270,9 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { if (sometimes()) { settingsVersion++; } - + if (sometimes()) { + aliasesVersion++; + } if (sometimes()) { List item = new ArrayList<>(); // Sometimes add a random retryable error @@ -268,6 +293,7 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { new ShardChangesAction.Response( mappingVersion, settingsVersion, + aliasesVersion, nextGlobalCheckPoint, nextGlobalCheckPoint, randomNonNegativeLong(), @@ -293,6 +319,7 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { ShardChangesAction.Response response = new ShardChangesAction.Response( mappingVersion, settingsVersion, + aliasesVersion, prevGlobalCheckpoint, prevGlobalCheckpoint, randomNonNegativeLong(), @@ -312,6 +339,7 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { ShardChangesAction.Response response = new ShardChangesAction.Response( mappingVersion, settingsVersion, + aliasesVersion, localLeaderGCP, localLeaderGCP, randomNonNegativeLong(), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java index 9cac01d278e..413960b69c8 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java @@ -59,6 +59,7 @@ public class ShardFollowNodeTaskStatusTests extends AbstractSerializingTestCase< randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomReadExceptions(), randomLong(), randomBoolean() ? 
new ElasticsearchException("fatal error") : null); @@ -80,6 +81,7 @@ public class ShardFollowNodeTaskStatusTests extends AbstractSerializingTestCase< assertThat(newInstance.writeBufferOperationCount(), equalTo(expectedInstance.writeBufferOperationCount())); assertThat(newInstance.followerMappingVersion(), equalTo(expectedInstance.followerMappingVersion())); assertThat(newInstance.followerSettingsVersion(), equalTo(expectedInstance.followerSettingsVersion())); + assertThat(newInstance.followerAliasesVersion(), equalTo(expectedInstance.followerAliasesVersion())); assertThat(newInstance.totalReadTimeMillis(), equalTo(expectedInstance.totalReadTimeMillis())); assertThat(newInstance.successfulReadRequests(), equalTo(expectedInstance.successfulReadRequests())); assertThat(newInstance.failedReadRequests(), equalTo(expectedInstance.failedReadRequests())); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java index 09d00dc6a33..ef1dc43869a 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java @@ -72,6 +72,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { private Queue mappingVersions; private Queue settingsUpdateFailures; private Queue settingsVersions; + private Queue aliasesUpdateFailures; + private Queue aliasesVersions; private Queue leaderGlobalCheckpoints; private Queue followerGlobalCheckpoints; private Queue maxSeqNos; @@ -88,7 +90,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { task.coordinateReads(); assertThat(shardChangesRequests, contains(new long[]{0L, 8L})); // treat this a peak request shardChangesRequests.clear(); - task.innerHandleReadResponse(0, 5L, generateShardChangesResponse(0, 5L, 0L, 0L, 60L)); + task.innerHandleReadResponse(0, 5L, generateShardChangesResponse(0, 5L, 0L, 0L, 1L, 60L)); assertThat(shardChangesRequests, contains(new long[][]{ {6L, 8L}, {14L, 8L}, {22L, 8L}, {30L, 8L}, {38L, 8L}, {46L, 8L}, {54L, 7L}} )); @@ -113,7 +115,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { shardChangesRequests.clear(); // Also invokes the coordinatesReads() method: - task.innerHandleReadResponse(0L, 63L, generateShardChangesResponse(0, 63, 0L, 0L, 128L)); + task.innerHandleReadResponse(0L, 63L, generateShardChangesResponse(0, 63, 0L, 0L, 1L, 128L)); assertThat(shardChangesRequests.size(), equalTo(0)); // no more reads, because write buffer count limit has been reached ShardFollowNodeTaskStatus status = task.getStatus(); @@ -139,7 +141,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { shardChangesRequests.clear(); // Also invokes the coordinatesReads() method: - task.innerHandleReadResponse(0L, 63L, generateShardChangesResponse(0, 63, 0L, 0L, 128L)); + task.innerHandleReadResponse(0L, 63L, generateShardChangesResponse(0, 63, 0L, 0L, 1L, 128L)); assertThat(shardChangesRequests.size(), equalTo(0)); // no more reads, because write buffer size limit has been reached ShardFollowNodeTaskStatus status = task.getStatus(); @@ -204,7 +206,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { task.markAsCompleted(); shardChangesRequests.clear(); // Also invokes the coordinatesReads() method: - task.innerHandleReadResponse(0L, 15L, generateShardChangesResponse(0, 15, 0L, 0L, 31L)); + task.innerHandleReadResponse(0L, 
15L, generateShardChangesResponse(0, 15, 0L, 0L, 1L, 31L)); assertThat(shardChangesRequests.size(), equalTo(0)); // no more reads, because task has been cancelled assertThat(bulkShardOperationRequests.size(), equalTo(0)); // no more writes, because task has been cancelled @@ -234,7 +236,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { task.markAsCompleted(); shardChangesRequests.clear(); // Also invokes the coordinatesReads() method: - task.innerHandleReadResponse(0L, 63L, generateShardChangesResponse(0, 63, 0L, 0L, 128L)); + task.innerHandleReadResponse(0L, 63L, generateShardChangesResponse(0, 63, 0L, 0L, 1L, 128L)); assertThat(shardChangesRequests.size(), equalTo(0)); // no more reads, because task has been cancelled assertThat(bulkShardOperationRequests.size(), equalTo(0)); // no more writes, because task has been cancelled @@ -483,7 +485,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { startTask(task, 63, -1); task.coordinateReads(); - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 63L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 1L, 63L); task.innerHandleReadResponse(0L, 63L, response); assertThat(bulkShardOperationRequests.size(), equalTo(1)); @@ -513,7 +515,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[1], equalTo(64L)); shardChangesRequests.clear(); - ShardChangesAction.Response response = generateShardChangesResponse(0, 20, 0L, 0L, 31L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 20, 0L, 0L, 1L, 31L); task.innerHandleReadResponse(0L, 63L, response); assertThat(shardChangesRequests.size(), equalTo(1)); @@ -542,7 +544,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { shardChangesRequests.clear(); task.markAsCompleted(); - ShardChangesAction.Response response = generateShardChangesResponse(0, 31, 0L, 0L, 31L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 31, 0L, 0L, 1L, 31L); task.innerHandleReadResponse(0L, 64L, response); assertThat(shardChangesRequests.size(), equalTo(0)); @@ -568,7 +570,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[1], equalTo(64L)); shardChangesRequests.clear(); - task.innerHandleReadResponse(0L, 63L, new ShardChangesAction.Response(0, 0, 0, 0, 100, new Translog.Operation[0], 1L)); + task.innerHandleReadResponse(0L, 63L, new ShardChangesAction.Response(0, 0, 0, 0, 0, 100, new Translog.Operation[0], 1L)); assertThat(shardChangesRequests.size(), equalTo(1)); assertThat(shardChangesRequests.get(0)[0], equalTo(0L)); @@ -591,7 +593,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { mappingVersions.add(1L); task.coordinateReads(); - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 1L, 0L, 63L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 1L, 0L, 0L, 63L); task.handleReadResponse(0L, 63L, response); assertThat(bulkShardOperationRequests.size(), equalTo(1)); @@ -620,7 +622,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } mappingVersions.add(1L); task.coordinateReads(); - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 1L, 0L, 63L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 1L, 0L, 0L, 63L); task.handleReadResponse(0L, 63L, response); assertThat(mappingUpdateFailures.size(), equalTo(0)); @@ -645,7 +647,7 @@ public class 
ShardFollowNodeTaskTests extends ESTestCase { mappingUpdateFailures.add(new RuntimeException()); task.coordinateReads(); - ShardChangesAction.Response response = generateShardChangesResponse(0, 64, 1L, 0L, 64L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 64, 1L, 0L, 0L, 64L); task.handleReadResponse(0L, 64L, response); assertThat(bulkShardOperationRequests.size(), equalTo(0)); @@ -668,7 +670,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { settingsVersions.add(1L); task.coordinateReads(); - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 1L, 63L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 1L, 0L, 63L); task.handleReadResponse(0L, 63L, response); assertThat(bulkShardOperationRequests.size(), equalTo(1)); @@ -677,6 +679,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { ShardFollowNodeTaskStatus status = task.getStatus(); assertThat(status.followerMappingVersion(), equalTo(0L)); assertThat(status.followerSettingsVersion(), equalTo(1L)); + assertThat(status.followerAliasesVersion(), equalTo(0L)); assertThat(status.outstandingReadRequests(), equalTo(1)); assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); @@ -698,15 +701,16 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } settingsVersions.add(1L); task.coordinateReads(); - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 1L, 63L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 1L, 0L, 63L); task.handleReadResponse(0L, 63L, response); - assertThat(mappingUpdateFailures.size(), equalTo(0)); + assertThat(settingsUpdateFailures.size(), equalTo(0)); assertThat(bulkShardOperationRequests.size(), equalTo(1)); assertThat(task.isStopped(), equalTo(false)); ShardFollowNodeTaskStatus status = task.getStatus(); assertThat(status.followerMappingVersion(), equalTo(0L)); assertThat(status.followerSettingsVersion(), equalTo(1L)); + assertThat(status.followerAliasesVersion(), equalTo(0L)); assertThat(status.outstandingReadRequests(), equalTo(1)); assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); @@ -723,7 +727,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { settingsUpdateFailures.add(new RuntimeException()); task.coordinateReads(); - ShardChangesAction.Response response = generateShardChangesResponse(0, 64, 0L, 1L, 64L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 64, 0L, 1L, 0L, 64L); task.handleReadResponse(0L, 64L, response); assertThat(bulkShardOperationRequests.size(), equalTo(0)); @@ -731,6 +735,89 @@ public class ShardFollowNodeTaskTests extends ESTestCase { ShardFollowNodeTaskStatus status = task.getStatus(); assertThat(status.followerMappingVersion(), equalTo(0L)); assertThat(status.followerSettingsVersion(), equalTo(0L)); + assertThat(status.followerAliasesVersion(), equalTo(0L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); + assertThat(status.lastRequestedSeqNo(), equalTo(63L)); + assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); + } + + public void testAliasUpdate() { + final ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + final ShardFollowNodeTask task = 
createShardFollowTask(params); + startTask(task, 63, -1); + + aliasesVersions.add(1L); + task.coordinateReads(); + final ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 1L, 63L); + task.handleReadResponse(0L, 63L, response); + + assertThat(bulkShardOperationRequests.size(), equalTo(1)); + assertThat(bulkShardOperationRequests.get(0), equalTo(Arrays.asList(response.getOperations()))); + + final ShardFollowNodeTaskStatus status = task.getStatus(); + assertThat(status.followerMappingVersion(), equalTo(0L)); + assertThat(status.followerSettingsVersion(), equalTo(0L)); + assertThat(status.followerAliasesVersion(), equalTo(1L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); + assertThat(status.lastRequestedSeqNo(), equalTo(63L)); + assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); + assertThat(status.followerGlobalCheckpoint(), equalTo(-1L)); + } + + public void testAliasUpdateRetryableError() { + final ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + final ShardFollowNodeTask task = createShardFollowTask(params); + startTask(task, 63, -1); + + int max = randomIntBetween(1, 30); + for (int i = 0; i < max; i++) { + aliasesUpdateFailures.add(new ConnectException()); + } + aliasesVersions.add(1L); + task.coordinateReads(); + final ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 1L, 63L); + task.handleReadResponse(0L, 63L, response); + + assertThat(aliasesUpdateFailures.size(), equalTo(0)); + assertThat(bulkShardOperationRequests.size(), equalTo(1)); + assertThat(task.isStopped(), equalTo(false)); + final ShardFollowNodeTaskStatus status = task.getStatus(); + assertThat(status.followerMappingVersion(), equalTo(0L)); + assertThat(status.followerSettingsVersion(), equalTo(0L)); + assertThat(status.followerAliasesVersion(), equalTo(1L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); + assertThat(status.lastRequestedSeqNo(), equalTo(63L)); + assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); + } + + public void testAliasUpdateNonRetryableError() { + final ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + final ShardFollowNodeTask task = createShardFollowTask(params); + startTask(task, 63, -1); + + aliasesUpdateFailures.add(new RuntimeException()); + task.coordinateReads(); + final ShardChangesAction.Response response = generateShardChangesResponse(0, 64, 0L, 0L, 1L, 64L); + task.handleReadResponse(0L, 64L, response); + + assertThat(bulkShardOperationRequests.size(), equalTo(0)); + assertThat(task.isStopped(), equalTo(true)); + final ShardFollowNodeTaskStatus status = task.getStatus(); + assertThat(status.followerMappingVersion(), equalTo(0L)); + assertThat(status.followerSettingsVersion(), equalTo(0L)); + assertThat(status.followerAliasesVersion(), equalTo(0L)); assertThat(status.outstandingReadRequests(), equalTo(1)); assertThat(status.outstandingWriteRequests(), equalTo(0)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); @@ -752,7 +839,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[0], equalTo(0L)); 
assertThat(shardChangesRequests.get(0)[1], equalTo(128L)); - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 63L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 1L, 63L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 63L, response); @@ -772,7 +859,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { params.maxWriteRequestOperationCount = 64; params.maxOutstandingWriteRequests = 2; ShardFollowNodeTask task = createShardFollowTask(params); - ShardChangesAction.Response response = generateShardChangesResponse(0, 256, 0L, 0L, 256L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 256, 0L, 0L, 1L, 256L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -785,7 +872,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { params.maxOutstandingWriteRequests = 4; // change to 4 outstanding writers task = createShardFollowTask(params); - response = generateShardChangesResponse(0, 256, 0L, 0L, 256L); + response = generateShardChangesResponse(0, 256, 0L, 0L, 1L, 256L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -804,7 +891,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { params.maxWriteRequestOperationCount = 8; params.maxOutstandingWriteRequests = 32; ShardFollowNodeTask task = createShardFollowTask(params); - ShardChangesAction.Response response = generateShardChangesResponse(0, 256, 0L, 0L, 256L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 256, 0L, 0L, 1L, 256L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -835,7 +922,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { for (int i = 0; i < max; i++) { writeFailures.add(new ShardNotFoundException(new ShardId("leader_index", "", 0))); } - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 63L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 1L, 63L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 63L, response); @@ -864,7 +951,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[1], equalTo(64L)); writeFailures.add(new RuntimeException()); - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 63L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 1L, 63L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 63L, response); @@ -891,7 +978,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[0], equalTo(0L)); assertThat(shardChangesRequests.get(0)[1], equalTo(64L)); - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 64L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 1L, 64L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -914,7 +1001,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { shardChangesRequests.clear(); followerGlobalCheckpoints.add(63L); - ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 63L); + ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 0L, 1L, 63L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 63L, response); @@ -1013,6 +1100,8 @@ 
public class ShardFollowNodeTaskTests extends ESTestCase { mappingVersions = new LinkedList<>(); settingsUpdateFailures = new LinkedList<>(); settingsVersions = new LinkedList<>(); + aliasesUpdateFailures = new LinkedList<>(); + aliasesVersions = new LinkedList<>(); leaderGlobalCheckpoints = new LinkedList<>(); followerGlobalCheckpoints = new LinkedList<>(); maxSeqNos = new LinkedList<>(); @@ -1048,6 +1137,20 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } } + @Override + protected void innerUpdateAliases(final LongConsumer handler, final Consumer errorHandler) { + final Exception failure = aliasesUpdateFailures.poll(); + if (failure != null) { + errorHandler.accept(failure); + return; + } + + final Long aliasesVersion = aliasesVersions.poll(); + if (aliasesVersion != null) { + handler.accept(aliasesVersion); + } + } + @Override protected void innerSendBulkShardOperationsRequest( String followerHistoryUUID, final List operations, @@ -1086,6 +1189,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { final ShardChangesAction.Response response = new ShardChangesAction.Response( mappingVersions.poll(), 0L, + 0L, leaderGlobalCheckpoints.poll(), maxSeqNos.poll(), randomNonNegativeLong(), @@ -1153,6 +1257,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { long toSeqNo, long mappingVersion, long settingsVersion, + long aliasesVersion, long leaderGlobalCheckPoint) { List ops = new ArrayList<>(); for (long seqNo = fromSeqNo; seqNo <= toSeqNo; seqNo++) { @@ -1163,6 +1268,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { return new ShardChangesAction.Response( mappingVersion, settingsVersion, + aliasesVersion, leaderGlobalCheckPoint, leaderGlobalCheckPoint, randomNonNegativeLong(), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java index abef313d0b0..9da7e1522d2 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java @@ -514,6 +514,12 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest handler.accept(1L); } + @Override + protected void innerUpdateAliases(LongConsumer handler, Consumer errorHandler) { + // no-op as alias updates are not tested here + handler.accept(1L); + } + @Override protected void innerSendBulkShardOperationsRequest( final String followerHistoryUUID, @@ -544,14 +550,21 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest final SeqNoStats seqNoStats = indexShard.seqNoStats(); final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); if (from > seqNoStats.getGlobalCheckpoint()) { - handler.accept(ShardChangesAction.getResponse(1L, 1L, seqNoStats, - maxSeqNoOfUpdatesOrDeletes, ShardChangesAction.EMPTY_OPERATIONS_ARRAY, 1L)); + handler.accept(ShardChangesAction.getResponse( + 1L, + 1L, + 1L, + seqNoStats, + maxSeqNoOfUpdatesOrDeletes, + ShardChangesAction.EMPTY_OPERATIONS_ARRAY, + 1L)); return; } Translog.Operation[] ops = ShardChangesAction.getOperations(indexShard, seqNoStats.getGlobalCheckpoint(), from, maxOperationCount, recordedLeaderIndexHistoryUUID, params.getMaxReadRequestSize()); // hard code mapping version; this is ok, as mapping updates are not tested here final ShardChangesAction.Response response 
= new ShardChangesAction.Response( + 1L, 1L, 1L, seqNoStats.getGlobalCheckpoint(), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java index 72ba0cd7067..4e9aadf8d82 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java @@ -57,6 +57,7 @@ public class StatsResponsesTests extends AbstractWireSerializingTestCase( - ((List>>) args[25]) + ((List>>) args[26]) .stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))), - (long) args[26], - (ElasticsearchException) args[27])); + (long) args[27], + (ElasticsearchException) args[28])); public static final String READ_EXCEPTIONS_ENTRY_PARSER_NAME = "shard-follow-node-task-status-read-exceptions-entry"; @@ -123,6 +126,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), WRITE_BUFFER_SIZE_IN_BYTES_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_MAPPING_VERSION_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_SETTINGS_VERSION_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_ALIASES_VERSION_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_TIME_MILLIS_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), SUCCESSFUL_READ_REQUESTS_FIELD); @@ -243,6 +247,12 @@ public class ShardFollowNodeTaskStatus implements Task.Status { return followerSettingsVersion; } + private final long followerAliasesVersion; + + public long followerAliasesVersion() { + return followerAliasesVersion; + } + private final long totalReadTimeMillis; public long totalReadTimeMillis() { @@ -337,6 +347,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { final long writeBufferSizeInBytes, final long followerMappingVersion, final long followerSettingsVersion, + final long followerAliasesVersion, final long totalReadTimeMillis, final long totalReadRemoteExecTimeMillis, final long successfulReadRequests, @@ -365,6 +376,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.writeBufferSizeInBytes = writeBufferSizeInBytes; this.followerMappingVersion = followerMappingVersion; this.followerSettingsVersion = followerSettingsVersion; + this.followerAliasesVersion = followerAliasesVersion; this.totalReadTimeMillis = totalReadTimeMillis; this.totalReadRemoteExecTimeMillis = totalReadRemoteExecTimeMillis; this.successfulReadRequests = successfulReadRequests; @@ -396,6 +408,11 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.writeBufferSizeInBytes = in.readVLong(); this.followerMappingVersion = in.readVLong(); this.followerSettingsVersion = in.readVLong(); + if (in.getVersion().onOrAfter(Version.V_7_3_0)) { + this.followerAliasesVersion = in.readVLong(); + } else { + this.followerAliasesVersion = 0L; + } this.totalReadTimeMillis = in.readVLong(); this.totalReadRemoteExecTimeMillis = in.readVLong(); this.successfulReadRequests = in.readVLong(); @@ -434,6 +451,9 @@ public class ShardFollowNodeTaskStatus implements Task.Status { out.writeVLong(writeBufferSizeInBytes); 
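// Wire-compatibility note: the aliases version written below is gated on the stream version, so
// nodes before 7.3.0 neither write nor expect the field, and the reader above defaults it to 0L.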
out.writeVLong(followerMappingVersion); out.writeVLong(followerSettingsVersion); + if (out.getVersion().onOrAfter(Version.V_7_3_0)) { + out.writeVLong(followerAliasesVersion); + } out.writeVLong(totalReadTimeMillis); out.writeVLong(totalReadRemoteExecTimeMillis); out.writeVLong(successfulReadRequests); @@ -484,6 +504,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { new ByteSizeValue(writeBufferSizeInBytes)); builder.field(FOLLOWER_MAPPING_VERSION_FIELD.getPreferredName(), followerMappingVersion); builder.field(FOLLOWER_SETTINGS_VERSION_FIELD.getPreferredName(), followerSettingsVersion); + builder.field(FOLLOWER_ALIASES_VERSION_FIELD.getPreferredName(), followerAliasesVersion); builder.humanReadableField( TOTAL_READ_TIME_MILLIS_FIELD.getPreferredName(), "total_read_time", @@ -564,7 +585,8 @@ public class ShardFollowNodeTaskStatus implements Task.Status { writeBufferOperationCount == that.writeBufferOperationCount && writeBufferSizeInBytes == that.writeBufferSizeInBytes && followerMappingVersion == that.followerMappingVersion && - followerSettingsVersion== that.followerSettingsVersion && + followerSettingsVersion == that.followerSettingsVersion && + followerAliasesVersion == that.followerAliasesVersion && totalReadTimeMillis == that.totalReadTimeMillis && totalReadRemoteExecTimeMillis == that.totalReadRemoteExecTimeMillis && successfulReadRequests == that.successfulReadRequests && @@ -604,6 +626,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { writeBufferSizeInBytes, followerMappingVersion, followerSettingsVersion, + followerAliasesVersion, totalReadTimeMillis, totalReadRemoteExecTimeMillis, successfulReadRequests, diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 326b8f3c7a8..89b97abac61 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -974,6 +974,9 @@ "follower_settings_version": { "type": "long" }, + "follower_aliases_version": { + "type": "long" + }, "total_read_time_millis": { "type": "long" }, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepTests.java index a0ee01a2403..fe873066dc5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepTests.java @@ -188,6 +188,7 @@ public class WaitForFollowShardTasksStepTests extends AbstractStepTestCase assertOK(client().performRequest(new Request("HEAD", "/" + indexName + "/_alias/logs")))); try (RestClient leaderClient = buildLeaderClient()) { index(leaderClient, indexName, "1"); @@ -226,8 +226,8 @@ public class CCRIndexLifecycleIT extends ESCCRRestTestCase { // Check that it got replicated to the follower assertBusy(() -> assertTrue(indexExists(indexName))); - // Aliases are not copied from leader index, so we need to add that for the rollover action in follower cluster: - client().performRequest(new Request("PUT", "/" + indexName + "/_alias/" + alias)); + // check that the alias was replicated + assertBusy(() -> assertOK(client().performRequest(new Request("HEAD", "/" + indexName + "/_alias/" + alias)))); index(leaderClient, indexName, "1"); assertDocumentExists(leaderClient, 
indexName, "1"); @@ -252,7 +252,6 @@ public class CCRIndexLifecycleIT extends ESCCRRestTestCase { // And the old index should have a write block and indexing complete set assertThat(getIndexSetting(leaderClient, indexName, "index.blocks.write"), equalTo("true")); assertThat(getIndexSetting(leaderClient, indexName, "index.lifecycle.indexing_complete"), equalTo("true")); - }); assertBusy(() -> { @@ -266,6 +265,8 @@ public class CCRIndexLifecycleIT extends ESCCRRestTestCase { assertThat(getIndexSetting(client(), indexName, "index.xpack.ccr.following_index"), nullValue()); // The next index should have been created on the follower as well indexExists(nextIndexName); + // and the alias should be on the next index + assertOK(client().performRequest(new Request("HEAD", "/" + nextIndexName + "/_alias/" + alias))); }); assertBusy(() -> { @@ -281,6 +282,74 @@ public class CCRIndexLifecycleIT extends ESCCRRestTestCase { } } + public void testAliasReplicatedOnShrink() throws Exception { + final String indexName = "shrink-alias-test"; + final String shrunkenIndexName = "shrink-" + indexName; + final String policyName = "shrink-test-policy"; + + final int numberOfAliases = randomIntBetween(0, 4); + + if ("leader".equals(targetCluster)) { + Settings indexSettings = Settings.builder() + .put("index.soft_deletes.enabled", true) + .put("index.number_of_shards", 3) + .put("index.number_of_replicas", 0) + .put("index.lifecycle.name", policyName) // this policy won't exist on the leader, that's fine + .build(); + final StringBuilder aliases = new StringBuilder(); + boolean first = true; + for (int i = 0; i < numberOfAliases; i++) { + if (first == false) { + aliases.append(","); + } + final Boolean isWriteIndex = randomFrom(new Boolean[] { null, false, true }); + if (isWriteIndex == null) { + aliases.append("\"alias_").append(i).append("\":{}"); + } else { + aliases.append("\"alias_").append(i).append("\":{\"is_write_index\":").append(isWriteIndex).append("}"); + } + first = false; + } + createIndex(indexName, indexSettings, "", aliases.toString()); + ensureGreen(indexName); + } else if ("follow".equals(targetCluster)) { + // Create a policy with just a Shrink action on the follower + putShrinkOnlyPolicy(client(), policyName); + + // Follow the index + followIndex(indexName, indexName); + // Make sure it actually took + assertBusy(() -> assertTrue(indexExists(indexName))); + // This should now be in the "warm" phase waiting for the index to be ready to unfollow + assertBusy(() -> assertILMPolicy(client(), indexName, policyName, "warm", "unfollow", "wait-for-indexing-complete")); + + // Set the indexing_complete flag on the leader so the index will actually unfollow + try (RestClient leaderClient = buildLeaderClient()) { + updateIndexSettings(leaderClient, indexName, Settings.builder() + .put("index.lifecycle.indexing_complete", true) + .build() + ); + } + + // Wait for the setting to get replicated + assertBusy(() -> assertThat(getIndexSetting(client(), indexName, "index.lifecycle.indexing_complete"), equalTo("true"))); + + // Wait for the index to continue with its lifecycle and be shrunk + assertBusy(() -> assertTrue(indexExists(shrunkenIndexName))); + + // assert the aliases were replicated + assertBusy(() -> { + for (int i = 0; i < numberOfAliases; i++) { + assertOK(client().performRequest(new Request("HEAD", "/" + shrunkenIndexName + "/_alias/alias_" + i))); + } + }); + assertBusy(() -> assertOK(client().performRequest(new Request("HEAD", "/" + shrunkenIndexName + "/_alias/" + indexName)))); + + // Wait 
for the index to complete its policy + assertBusy(() -> assertILMPolicy(client(), shrunkenIndexName, policyName, "completed", "completed", "completed")); + } + } + public void testUnfollowInjectedBeforeShrink() throws Exception { final String indexName = "shrink-test"; final String shrunkenIndexName = "shrink-" + indexName; From 78be3dde255b5899dd48c08cb74927b69486bc2c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 4 Jun 2019 20:51:58 -0400 Subject: [PATCH 063/210] Enable testing against JDK 13 EA builds (#40829) This commit adds JDK 13 to the CI rotation for testing. For now, we will be testing against JDK 13 EA builds. --- .ci/java-versions.properties | 1 + .ci/matrix-build-javas.yml | 1 + .ci/matrix-runtime-javas.yml | 1 + .../java/org/elasticsearch/bootstrap/Security.java | 6 +++--- .../java/org/elasticsearch/common/inject/Binder.java | 2 +- .../inject/assistedinject/FactoryProvider.java | 2 +- .../common/lucene/search/XMoreLikeThis.java | 2 +- .../org/elasticsearch/snapshots/package-info.java | 6 +++--- .../org/elasticsearch/bootstrap/security.policy | 2 ++ .../org/elasticsearch/xpack/sql/package-info.java | 12 ++++++------ 10 files changed, 20 insertions(+), 15 deletions(-) diff --git a/.ci/java-versions.properties b/.ci/java-versions.properties index df205e4868c..9df8850c301 100644 --- a/.ci/java-versions.properties +++ b/.ci/java-versions.properties @@ -8,3 +8,4 @@ ES_BUILD_JAVA=openjdk12 ES_RUNTIME_JAVA=java8 GRADLE_TASK=build + diff --git a/.ci/matrix-build-javas.yml b/.ci/matrix-build-javas.yml index 202fd60edea..85813a01a9d 100644 --- a/.ci/matrix-build-javas.yml +++ b/.ci/matrix-build-javas.yml @@ -7,3 +7,4 @@ ES_BUILD_JAVA: - openjdk12 + - openjdk13 diff --git a/.ci/matrix-runtime-javas.yml b/.ci/matrix-runtime-javas.yml index 0f945376ee4..a865f33710b 100644 --- a/.ci/matrix-runtime-javas.yml +++ b/.ci/matrix-runtime-javas.yml @@ -11,6 +11,7 @@ ES_RUNTIME_JAVA: - java11 - java12 - openjdk12 + - openjdk13 - zulu8 - zulu11 - zulu12 diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Security.java b/server/src/main/java/org/elasticsearch/bootstrap/Security.java index 2a537186f6a..b477e45ba37 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -61,7 +61,7 @@ import static org.elasticsearch.bootstrap.FilePermissionUtils.addSingleFilePath; /** * Initializes SecurityManager with necessary permissions. *
- * <h1>Initialization</h1>
+ * <h2>Initialization</h2>
    * The JVM is not initially started with security manager enabled, * instead we turn it on early in the startup process. This is a tradeoff * between security and ease of use: @@ -72,7 +72,7 @@ import static org.elasticsearch.bootstrap.FilePermissionUtils.addSingleFilePath; * otherwise be permitted. * *
- * <h1>Permissions</h1>
+ * <h2>Permissions</h2>
    * Permissions use a policy file packaged as a resource, this file is * also used in tests. File permissions are generated dynamically and * combined with this policy file. @@ -92,7 +92,7 @@ import static org.elasticsearch.bootstrap.FilePermissionUtils.addSingleFilePath; * cleanups to the scripting apis). But still it can provide some defense for users * that enable dynamic scripting without being fully aware of the consequences. *
- * <h1>Debugging Security</h1>
+ * <h2>Debugging Security</h2>
    * A good place to start when there is a problem is to turn on security debugging: *
      * ES_JAVA_OPTS="-Djava.security.debug=access,failure" bin/elasticsearch
diff --git a/server/src/main/java/org/elasticsearch/common/inject/Binder.java b/server/src/main/java/org/elasticsearch/common/inject/Binder.java
    index 03d164bcbaa..55e219047c2 100644
    --- a/server/src/main/java/org/elasticsearch/common/inject/Binder.java
    +++ b/server/src/main/java/org/elasticsearch/common/inject/Binder.java
    @@ -31,7 +31,7 @@ import java.lang.annotation.Annotation;
      * used to create an {@link Injector}. Guice provides this object to your
      * application's {@link Module} implementors so they may each contribute
      * their own bindings and other registrations.
- * <h3>The Guice Binding EDSL</h3>
+ * <h2>The Guice Binding EDSL</h2>
    *

 * Guice uses an embedded domain-specific language, or EDSL, to help you
 * create bindings simply and readably. This approach is great for overall
diff --git a/server/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider.java b/server/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider.java
index 9e2d0e37986..2ef0b89fb15 100644
--- a/server/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider.java
+++ b/server/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider.java
@@ -39,7 +39,7 @@ import static java.util.Collections.unmodifiableSet;
 /**
  * Provides a factory that combines the caller's arguments with injector-supplied values to
  * construct objects.
- * <h3>Defining a factory</h3>
+ * <h2>Defining a factory</h2>
    * Create an interface whose methods return the constructed type, or any of its supertypes. The * method's parameters are the arguments required to build the constructed type. *
    public interface PaymentFactory {
    diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java
    index 1010c917eca..b958b293183 100644
    --- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java
    +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java
    @@ -97,7 +97,7 @@ import java.util.Set;
      * above.  The frequency and length thresholds could be parameters, etc.
      * Doug
      * 
- * <h3>Initial Usage</h3>
+ * <h2>Initial Usage</h2>
    *

    * This class has lots of options to try to make it efficient and flexible. * The simplest possible usage is as follows. The bold diff --git a/server/src/main/java/org/elasticsearch/snapshots/package-info.java b/server/src/main/java/org/elasticsearch/snapshots/package-info.java index d5149c9bd35..010e63eae6b 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/package-info.java +++ b/server/src/main/java/org/elasticsearch/snapshots/package-info.java @@ -20,7 +20,7 @@ /** *

 * <p>This package exposes the Elasticsearch Snapshot functionality.</p>
 *
- * <h1>Preliminaries</h1>
+ * <h2>Preliminaries</h2>
    * *

    There are two communication channels between all nodes and master in the snapshot functionality:

    *
      @@ -32,7 +32,7 @@ * snapshot's entry in the cluster state accordingly. *
 *
- * <h1>Snapshot Creation</h1>
+ * <h2>Snapshot Creation</h2>
    *

    Snapshots are created by the following sequence of events:

    *
      *
    1. An invocation of {@link org.elasticsearch.snapshots.SnapshotsService#createSnapshot} enqueues a cluster state update to create @@ -68,7 +68,7 @@ *
    2. *
 *
- * <h1>Deleting a Snapshot</h1>
+ * <h2>Deleting a Snapshot</h2>
    * *

    Deleting a snapshot can take the form of either simply deleting it from the repository or (if it has not completed yet) aborting it * and subsequently deleting it from the repository.

    diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy index fbfa0f39b16..415828b8a53 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -55,6 +55,8 @@ grant codeBase "${codebase.elasticsearch-plugin-classloader}" { //// Everything else: grant { + // needed by vendored Guice + permission java.lang.RuntimePermission "accessClassInPackage.jdk.internal.vm.annotation"; // checked by scripting engines, and before hacks and other issues in // third party code, to safeguard these against unprivileged code like scripts. diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/package-info.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/package-info.java index 9528df0332d..2a55be6e816 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/package-info.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/package-info.java @@ -13,14 +13,14 @@ * if possible, to one (at the moment) query DSL. Of course, this means * not all SQL queries are supported.
 *
- * <h1>Premise</h1>
+ * <h2>Premise</h2>
 * Since Elasticsearch is not a database nor does it supports arbitrary
 * {@code JOIN}s (a cornerstone of SQL), SQL module is built from the
 * ground-up with Elasticsearch in mind first and SQL second. In fact,
 * even the grammar introduces Elasticsearch specific components that
 * have no concept in ANSI SQL.
 *
- * <h1>Architecture</h1>
+ * <h2>Architecture</h2>
 * SQL module is roughly based on the Volcano project (by Graefe
 * {@code &} co)
 * [1]
@@ -53,7 +53,7 @@
 * (to different degrees) by the majority of SQL engines out there such
 * as Apache Calcite, Apache Impala, Apache Spark and Facebook Presto.
 *
- * <h1>Concepts</h1>
+ * <h2>Concepts</h2>
    * * The building operation of the SQL engine is defined by an action, * namely a rule (defined in {@link org.elasticsearch.xpack.sql.rule rule} @@ -112,8 +112,8 @@ *
 * The Elasticsearch query gets executed
 *
- * <h1>Digression - Visitors, pattern matching, {@code instanceof} and
- * Java 10/11/12</h1>
+ * <h2>Digression - Visitors, pattern matching, {@code instanceof} and
+ * Java 10/11/12</h2>
 *
 * To implement the above concepts, several choices have been made in the
 * engine (which are not common in the rest of the XPack code base). In
@@ -146,7 +146,7 @@
 * {@link org.elasticsearch.xpack.sql.tree.Node#transformDown(java.util.function.Function, Class)
 * pre-order transformation}).
 *
- * <h1>Components</h1>
+ * <h2>Components</h2>
    * * The SQL engine is made up of the following components: *
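As an aside to PATCH 063: the javadoc edits above all follow one pattern, namely moving the first heading inside a class or package comment down to <h2>. A minimal sketch of that convention, assuming JDK 13's javadoc doclint flags <h1> in doc comments because the generated page title already occupies that level; the class below is illustrative only and is not part of the Elasticsearch sources.

/**
 * Example of the heading convention the JDK 13 javadoc changes move to.
 *
 * <h2>Overview</h2>
 * Starting the comment's own headings at {@code <h2>} avoids doclint
 * accessibility warnings on newer JDKs, where an {@code <h1>} inside a
 * doc comment can be reported as a heading used out of sequence because
 * the generated page already uses the top heading level for its title.
 *
 * <h2>Usage</h2>
 * Purely illustrative; compile it only to see that javadoc accepts it.
 */
public final class HeadingConventionExample {
    private HeadingConventionExample() {
        // never instantiated; the class exists only to carry the javadoc above
    }
}
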
    From be2365811455efb6bf2628574380721aec7975d8 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 4 Jun 2019 17:53:24 -0700 Subject: [PATCH 064/210] Avoid clobbering shared testcluster JAR files when installing modules (#42879) (cherry picked from commit 6da9aa29170c840bba08637c106c85bf16359979) --- .../elasticsearch/gradle/testclusters/ElasticsearchNode.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index bba94f6c7d1..83a702f8a85 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -23,6 +23,7 @@ import org.elasticsearch.gradle.Distribution; import org.elasticsearch.gradle.FileSupplier; import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.Version; +import org.gradle.api.file.DuplicatesStrategy; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; @@ -361,6 +362,9 @@ public class ElasticsearchNode implements TestClusterConfiguration { private void installModules() { if (distribution == Distribution.INTEG_TEST) { modules.forEach(module -> services.copy(spec -> { + // ensure we don't override any existing JARs, since these are hardlinks other clusters might be using those files + spec.setDuplicatesStrategy(DuplicatesStrategy.EXCLUDE); + if (module.getName().toLowerCase().endsWith(".zip")) { spec.from(services.zipTree(module)); } else if (module.isDirectory()) { From 00f01aaece6516c522ded37244ad9dd09cc56311 Mon Sep 17 00:00:00 2001 From: Dimitrios Liappis Date: Mon, 3 Jun 2019 15:12:29 +0300 Subject: [PATCH 065/210] Clarify heap setting in Docker docs (#42754) Add note in the Docker docs that even when container memory is limited, we still require specifying -Xms/-Xmx using one of the supported methods. --- docs/reference/setup/install/docker.asciidoc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 9037a292168..e8dd1ee9595 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -372,6 +372,12 @@ published ports with `--publish-all`, unless you are pinning one container per h . Use the `ES_JAVA_OPTS` environment variable to set heap size. For example, to use 16GB, use `-e ES_JAVA_OPTS="-Xms16g -Xmx16g"` with `docker run`. ++ +-- +NOTE: You still need to <> even if you are +https://docs.docker.com/config/containers/resource_constraints/#limit-a-containers-access-to-memory[limiting +memory access] to the container. +-- . Pin your deployments to a specific version of the {es} Docker image, for example +docker.elastic.co/elasticsearch/elasticsearch:{version}+. From 955aee8a07b11315ae88015998842f6e7f147174 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 5 Jun 2019 09:00:22 +0100 Subject: [PATCH 066/210] More logging in testRerouteOccursOnDiskPassingHighWatermark (#42864) This test is failing because recoveries of these empty shards are not completing in a reasonable time, but the reason for this is still obscure. This commit adds yet more logging. 
Relates #40174, #42424 --- .../routing/allocation/decider/MockDiskUsagesIT.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index 8565beb1b89..4580c5b59ed 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation.decider; +import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterState; @@ -30,6 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.ArrayList; import java.util.Collection; @@ -51,6 +53,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase { return Collections.singletonList(MockInternalClusterInfoService.TestPlugin.class); } + @TestLogging("org.elasticsearch.indices.recovery:TRACE,org.elasticsearch.cluster.service:TRACE") public void testRerouteOccursOnDiskPassingHighWatermark() throws Exception { List nodes = internalCluster().startNodes(3); @@ -98,10 +101,16 @@ public class MockDiskUsagesIT extends ESIntegTestCase { cis.setN2Usage(realNodeNames.get(1), new DiskUsage(nodes.get(1), "n2", "_na_", 100, 50)); cis.setN3Usage(realNodeNames.get(2), new DiskUsage(nodes.get(2), "n3", "_na_", 100, 0)); // nothing free on node3 + logger.info("--> waiting for shards to relocate off node [{}]", realNodeNames.get(2)); + assertBusy(() -> { final ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); logger.info("--> {}", clusterState.routingTable()); + final RecoveryResponse recoveryResponse = client().admin().indices() + .prepareRecoveries("test").setActiveOnly(true).setDetailed(true).get(); + logger.info("--> recoveries: {}", recoveryResponse); + final Map nodesToShardCount = new HashMap<>(); for (final RoutingNode node : clusterState.getRoutingNodes()) { logger.info("--> node {} has {} shards", @@ -118,6 +127,8 @@ public class MockDiskUsagesIT extends ESIntegTestCase { cis.setN2Usage(realNodeNames.get(1), new DiskUsage(nodes.get(1), "n2", "_na_", 100, 50)); cis.setN3Usage(realNodeNames.get(2), new DiskUsage(nodes.get(2), "n3", "_na_", 100, 50)); // node3 has free space now + logger.info("--> waiting for shards to rebalance back onto node [{}]", realNodeNames.get(2)); + assertBusy(() -> { final Map nodesToShardCount = new HashMap<>(); final ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); From 41a9f3ae3b3b45b0918fd79e91f18d66aa73665f Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 5 Jun 2019 11:00:26 +0200 Subject: [PATCH 067/210] Use reader attributes to control term dict memory useage (#42838) This change makes use of the reader attributes added in LUCENE-8671 to ensure that `_id` fields are always on-heap for best update performance and term dicts are generally off-heap on Read-Only engines. 
Closes #38390 --- .../index/engine/InternalEngine.java | 16 ++ .../index/engine/NoOpEngine.java | 2 +- .../index/engine/ReadOnlyEngine.java | 11 +- .../index/store/FsDirectoryFactory.java | 9 ++ .../org/elasticsearch/index/store/Store.java | 147 +----------------- .../index/engine/InternalEngineTests.java | 61 ++++++++ .../index/engine/NoOpEngineTests.java | 2 +- .../elasticsearch/index/store/StoreTests.java | 44 ------ .../index/engine/FrozenEngine.java | 4 +- .../snapshots/SourceOnlySnapshot.java | 4 +- .../SourceOnlySnapshotRepository.java | 2 +- .../index/engine/FrozenEngineTests.java | 10 +- .../index/engine/FrozenIndexTests.java | 25 ++- 13 files changed, 127 insertions(+), 210 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 9845c90a1db..59d6b48dcc8 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -20,6 +20,8 @@ package org.elasticsearch.index.engine; import org.apache.logging.log4j.Logger; +import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader; +import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader.FSTLoadMode; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DirectoryReader; @@ -42,7 +44,9 @@ import org.apache.lucene.search.SearcherManager; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.InfoStream; import org.elasticsearch.Assertions; @@ -77,6 +81,7 @@ import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ElasticsearchMergePolicy; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.store.FsDirectoryFactory; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; @@ -2143,10 +2148,21 @@ public class InternalEngine extends Engine { } } + static Map getReaderAttributes(Directory directory) { + Directory unwrap = FilterDirectory.unwrap(directory); + boolean defaultOffHeap = FsDirectoryFactory.isHybridFs(unwrap) || unwrap instanceof MMapDirectory; + return Map.of( + BlockTreeTermsReader.FST_MODE_KEY, // if we are using MMAP for term dics we force all off heap unless it's the ID field + defaultOffHeap ? FSTLoadMode.OFF_HEAP.name() : FSTLoadMode.ON_HEAP.name() + , BlockTreeTermsReader.FST_MODE_KEY + "." 
+ IdFieldMapper.NAME, // always force ID field on-heap for fast updates + FSTLoadMode.ON_HEAP.name()); + } + private IndexWriterConfig getIndexWriterConfig() { final IndexWriterConfig iwc = new IndexWriterConfig(engineConfig.getAnalyzer()); iwc.setCommitOnClose(false); // we by default don't commit on close iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND); + iwc.setReaderAttributes(getReaderAttributes(store.directory())); iwc.setIndexDeletionPolicy(combinedDeletionPolicy); // with tests.verbose, lucene sets this up: plumb to align with filesystem stream boolean verbose = false; diff --git a/server/src/main/java/org/elasticsearch/index/engine/NoOpEngine.java b/server/src/main/java/org/elasticsearch/index/engine/NoOpEngine.java index a41f07e994b..7f474d1be24 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/NoOpEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/NoOpEngine.java @@ -46,7 +46,7 @@ public final class NoOpEngine extends ReadOnlyEngine { super(config, null, null, true, Function.identity()); this.stats = new SegmentsStats(); Directory directory = store.directory(); - try (DirectoryReader reader = DirectoryReader.open(directory)) { + try (DirectoryReader reader = DirectoryReader.open(directory, OFF_HEAP_READER_ATTRIBUTES)) { for (LeafReaderContext ctx : reader.getContext().leaves()) { SegmentReader segmentReader = Lucene.segmentReader(ctx.reader()); fillSegmentStats(segmentReader, true, stats); diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index b11fcbbd2ac..b4429f127be 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.engine; +import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexReader; @@ -47,7 +48,9 @@ import java.io.Closeable; import java.io.IOException; import java.io.UncheckedIOException; import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.function.BiFunction; import java.util.function.Function; @@ -62,6 +65,12 @@ import java.util.stream.Stream; */ public class ReadOnlyEngine extends Engine { + /** + * Reader attributes used for read only engines. These attributes prevent loading term dictionaries on-heap even if the field is an + * ID field. 
+ */ + public static final Map OFF_HEAP_READER_ATTRIBUTES = Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, + BlockTreeTermsReader.FSTLoadMode.OFF_HEAP.name()); private final SegmentInfos lastCommittedSegmentInfos; private final SeqNoStats seqNoStats; private final TranslogStats translogStats; @@ -165,7 +174,7 @@ public class ReadOnlyEngine extends Engine { } protected DirectoryReader open(IndexCommit commit) throws IOException { - return DirectoryReader.open(commit); + return DirectoryReader.open(commit, OFF_HEAP_READER_ATTRIBUTES); } private DocsStats docsStats(final SegmentInfos lastCommittedSegmentInfos) { diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java index 84bb4c49b27..c86206763ca 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.store.FileSwitchDirectory; +import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.LockFactory; @@ -121,6 +122,14 @@ public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { return directory; } + /** + * Returns true iff the directory is a hybrid fs directory + */ + public static boolean isHybridFs(Directory directory) { + Directory unwrap = FilterDirectory.unwrap(directory); + return unwrap instanceof HybridDirectory; + } + static final class HybridDirectory extends NIOFSDirectory { private final FSDirectory randomAccessDirectory; diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index f860e7fd940..87a822c9596 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -38,7 +38,6 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.BufferedChecksum; import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteBufferIndexInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; @@ -46,7 +45,6 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.Lock; -import org.apache.lucene.store.RandomAccessInput; import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; @@ -98,7 +96,6 @@ import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantReadWriteLock; @@ -137,7 +134,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * this by exploiting lucene internals and wrapping the IndexInput in a simple delegate. 
*/ public static final Setting FORCE_RAM_TERM_DICT = Setting.boolSetting("index.force_memory_term_dictionary", false, - Property.IndexScope); + Property.IndexScope, Property.Deprecated); static final String CODEC = "store"; static final int VERSION_WRITE_THROWABLE= 2; // we write throwable since 2.0 static final int VERSION_STACK_TRACE = 1; // we write the stack trace too since 1.4.0 @@ -172,8 +169,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref final TimeValue refreshInterval = indexSettings.getValue(INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING); logger.debug("store stats are refreshed with refresh_interval [{}]", refreshInterval); ByteSizeCachingDirectory sizeCachingDir = new ByteSizeCachingDirectory(directory, refreshInterval); - this.directory = new StoreDirectory(sizeCachingDir, Loggers.getLogger("index.store.deletes", shardId), - indexSettings.getValue(FORCE_RAM_TERM_DICT)); + this.directory = new StoreDirectory(sizeCachingDir, Loggers.getLogger("index.store.deletes", shardId)); this.shardLock = shardLock; this.onClose = onClose; @@ -712,12 +708,10 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref static final class StoreDirectory extends FilterDirectory { private final Logger deletesLogger; - private final boolean forceRamTermDict; - StoreDirectory(ByteSizeCachingDirectory delegateDirectory, Logger deletesLogger, boolean forceRamTermDict) { + StoreDirectory(ByteSizeCachingDirectory delegateDirectory, Logger deletesLogger) { super(delegateDirectory); this.deletesLogger = deletesLogger; - this.forceRamTermDict = forceRamTermDict; } /** Estimate the cumulative size of all files in this directory in bytes. */ @@ -744,18 +738,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref super.close(); } - @Override - public IndexInput openInput(String name, IOContext context) throws IOException { - IndexInput input = super.openInput(name, context); - if (name.endsWith(".tip") || name.endsWith(".cfs")) { - // only do this if we are reading cfs or tip file - all other files don't need this. 
- if (forceRamTermDict && input instanceof ByteBufferIndexInput) { - return new DeoptimizingIndexInput(input.toString(), input); - } - } - return input; - } - @Override public String toString() { return "store(" + in.toString() + ")"; @@ -1636,127 +1618,4 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref // we also don't specify a codec here and merges should use the engines for this index .setMergePolicy(NoMergePolicy.INSTANCE); } - - /** - * see {@link #FORCE_RAM_TERM_DICT} for details - */ - private static final class DeoptimizingIndexInput extends IndexInput { - - private final IndexInput in; - - private DeoptimizingIndexInput(String resourceDescription, IndexInput in) { - super(resourceDescription); - this.in = in; - } - - @Override - public IndexInput clone() { - return new DeoptimizingIndexInput(toString(), in.clone()); - } - - @Override - public void close() throws IOException { - in.close(); - } - - @Override - public long getFilePointer() { - return in.getFilePointer(); - } - - @Override - public void seek(long pos) throws IOException { - in.seek(pos); - } - - @Override - public long length() { - return in.length(); - } - - @Override - public String toString() { - return in.toString(); - } - - @Override - public IndexInput slice(String sliceDescription, long offset, long length) throws IOException { - return new DeoptimizingIndexInput(sliceDescription, in.slice(sliceDescription, offset, length)); - } - - @Override - public RandomAccessInput randomAccessSlice(long offset, long length) throws IOException { - return in.randomAccessSlice(offset, length); - } - - @Override - public byte readByte() throws IOException { - return in.readByte(); - } - - @Override - public void readBytes(byte[] b, int offset, int len) throws IOException { - in.readBytes(b, offset, len); - } - - @Override - public void readBytes(byte[] b, int offset, int len, boolean useBuffer) throws IOException { - in.readBytes(b, offset, len, useBuffer); - } - - @Override - public short readShort() throws IOException { - return in.readShort(); - } - - @Override - public int readInt() throws IOException { - return in.readInt(); - } - - @Override - public int readVInt() throws IOException { - return in.readVInt(); - } - - @Override - public int readZInt() throws IOException { - return in.readZInt(); - } - - @Override - public long readLong() throws IOException { - return in.readLong(); - } - - @Override - public long readVLong() throws IOException { - return in.readVLong(); - } - - @Override - public long readZLong() throws IOException { - return in.readZLong(); - } - - @Override - public String readString() throws IOException { - return in.readString(); - } - - @Override - public Map readMapOfStrings() throws IOException { - return in.readMapOfStrings(); - } - - @Override - public Set readSetOfStrings() throws IOException { - return in.readSetOfStrings(); - } - - @Override - public void skipBytes(long numBytes) throws IOException { - in.skipBytes(numBytes); - } - } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 703f193a412..b4a3fdb0da0 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -60,8 +60,11 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHitCountCollector; 
import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.Lock; +import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; @@ -97,6 +100,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.codec.CodecService; @@ -122,7 +126,9 @@ import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.shard.ShardUtils; +import org.elasticsearch.index.store.FsDirectoryFactory; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.SnapshotMatchers; import org.elasticsearch.index.translog.Translog; @@ -5677,4 +5683,59 @@ public class InternalEngineTests extends EngineTestCase { } assertThat(engine.config().getCircuitBreakerService().getBreaker(CircuitBreaker.ACCOUNTING).getUsed(), equalTo(0L)); } + + public void testGetReaderAttributes() throws IOException { + try(BaseDirectoryWrapper dir = newFSDirectory(createTempDir())) { + Directory unwrap = FilterDirectory.unwrap(dir); + boolean isMMap = unwrap instanceof MMapDirectory; + Map readerAttributes = InternalEngine.getReaderAttributes(dir); + assertEquals(2, readerAttributes.size()); + assertEquals("ON_HEAP", readerAttributes.get("blocktree.terms.fst._id")); + if (isMMap) { + assertEquals("OFF_HEAP", readerAttributes.get("blocktree.terms.fst")); + } else { + assertEquals("ON_HEAP", readerAttributes.get("blocktree.terms.fst")); + } + } + + try(MMapDirectory dir = new MMapDirectory(createTempDir())) { + Map readerAttributes = + InternalEngine.getReaderAttributes(randomBoolean() ? dir : + new MockDirectoryWrapper(random(), dir)); + assertEquals(2, readerAttributes.size()); + assertEquals("ON_HEAP", readerAttributes.get("blocktree.terms.fst._id")); + assertEquals("OFF_HEAP", readerAttributes.get("blocktree.terms.fst")); + } + + Settings.Builder settingsBuilder = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); + Settings settings = settingsBuilder.build(); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); + FsDirectoryFactory service = new FsDirectoryFactory(); + Path tempDir = createTempDir().resolve(indexSettings.getUUID()).resolve("0"); + ShardPath path = new ShardPath(false, tempDir, tempDir, new ShardId(indexSettings.getIndex(), 0)); + try (Directory directory = service.newDirectory(indexSettings, path)) { + + Map readerAttributes = + InternalEngine.getReaderAttributes(randomBoolean() ? 
directory : + new MockDirectoryWrapper(random(), directory)); + assertEquals(2, readerAttributes.size()); + + switch (IndexModule.defaultStoreType(true)) { + case HYBRIDFS: + case MMAPFS: + assertEquals("ON_HEAP", readerAttributes.get("blocktree.terms.fst._id")); + assertEquals("OFF_HEAP", readerAttributes.get("blocktree.terms.fst")); + break; + case NIOFS: + case SIMPLEFS: + case FS: + assertEquals("ON_HEAP", readerAttributes.get("blocktree.terms.fst._id")); + assertEquals("ON_HEAP", readerAttributes.get("blocktree.terms.fst")); + break; + default: + fail("unknownw type"); + } + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java index f03500e6e12..de32e3e4307 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java @@ -144,7 +144,7 @@ public class NoOpEngineTests extends EngineTestCase { assertEquals(expectedDocStats.getTotalSizeInBytes(), noOpEngine.docStats().getTotalSizeInBytes()); assertEquals(expectedDocStats.getAverageSizeInBytes(), noOpEngine.docStats().getAverageSizeInBytes()); assertEquals(expectedSegmentStats.getCount(), noOpEngine.segmentsStats(includeFileSize, true).getCount()); - assertEquals(expectedSegmentStats.getMemoryInBytes(), noOpEngine.segmentsStats(includeFileSize, true).getMemoryInBytes()); + // don't compare memory in bytes since we load the index with term-dict off-heap assertEquals(expectedSegmentStats.getFileSizes().size(), noOpEngine.segmentsStats(includeFileSize, true).getFileSizes().size()); diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index fc5870bc8e1..165e6c4a416 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -40,14 +40,12 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.Term; import org.apache.lucene.store.BaseDirectoryWrapper; -import org.apache.lucene.store.ByteBufferIndexInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.store.NIOFSDirectory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; @@ -1084,48 +1082,6 @@ public class StoreTests extends ESTestCase { } } - public void testDeoptimizeMMap() throws IOException { - IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("index", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .put(Store.FORCE_RAM_TERM_DICT.getKey(), true).build()); - final ShardId shardId = new ShardId("index", "_na_", 1); - String file = "test." + (randomBoolean() ? 
"tip" : "cfs"); - try (Store store = new Store(shardId, indexSettings, new MMapDirectory(createTempDir()), new DummyShardLock(shardId))) { - try (IndexOutput output = store.directory().createOutput(file, IOContext.DEFAULT)) { - output.writeInt(0); - } - try (IndexOutput output = store.directory().createOutput("someOtherFile.txt", IOContext.DEFAULT)) { - output.writeInt(0); - } - try (IndexInput input = store.directory().openInput(file, IOContext.DEFAULT)) { - assertFalse(input instanceof ByteBufferIndexInput); - assertFalse(input.clone() instanceof ByteBufferIndexInput); - assertFalse(input.slice("foo", 1, 1) instanceof ByteBufferIndexInput); - } - - try (IndexInput input = store.directory().openInput("someOtherFile.txt", IOContext.DEFAULT)) { - assertTrue(input instanceof ByteBufferIndexInput); - assertTrue(input.clone() instanceof ByteBufferIndexInput); - assertTrue(input.slice("foo", 1, 1) instanceof ByteBufferIndexInput); - } - } - - indexSettings = IndexSettingsModule.newIndexSettings("index", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .put(Store.FORCE_RAM_TERM_DICT.getKey(), false).build()); - - try (Store store = new Store(shardId, indexSettings, new MMapDirectory(createTempDir()), new DummyShardLock(shardId))) { - try (IndexOutput output = store.directory().createOutput(file, IOContext.DEFAULT)) { - output.writeInt(0); - } - try (IndexInput input = store.directory().openInput(file, IOContext.DEFAULT)) { - assertTrue(input instanceof ByteBufferIndexInput); - assertTrue(input.clone() instanceof ByteBufferIndexInput); - assertTrue(input.slice("foo", 1, 1) instanceof ByteBufferIndexInput); - } - } - } - public void testGetPendingFiles() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); final String testfile = "testfile"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java index 50f1125b275..631bd0b9ef9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java @@ -78,7 +78,7 @@ public final class FrozenEngine extends ReadOnlyEngine { boolean success = false; Directory directory = store.directory(); - try (DirectoryReader reader = DirectoryReader.open(directory)) { + try (DirectoryReader reader = DirectoryReader.open(directory, OFF_HEAP_READER_ATTRIBUTES)) { canMatchReader = ElasticsearchDirectoryReader.wrap(new RewriteCachingDirectoryReader(directory, reader.leaves()), config.getShardId()); // we record the segment stats here - that's what the reader needs when it's open and it give the user @@ -168,7 +168,7 @@ public final class FrozenEngine extends ReadOnlyEngine { for (ReferenceManager.RefreshListener listeners : config ().getInternalRefreshListener()) { listeners.beforeRefresh(); } - reader = DirectoryReader.open(engineConfig.getStore().directory()); + reader = DirectoryReader.open(engineConfig.getStore().directory(), OFF_HEAP_READER_ATTRIBUTES); processReader(reader); reader = lastOpenedReader = wrapReader(reader, Function.identity()); reader.getReaderCacheHelper().addClosedListener(this::onReaderClosed); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshot.java index 102bcde9dc7..2129b14993d 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshot.java @@ -38,6 +38,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.engine.ReadOnlyEngine; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -80,7 +81,8 @@ public class SourceOnlySnapshot { List createdFiles = new ArrayList<>(); String segmentFileName; try (Lock writeLock = targetDirectory.obtainLock(IndexWriter.WRITE_LOCK_NAME); - StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(commit)) { + StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(commit, + ReadOnlyEngine.OFF_HEAP_READER_ATTRIBUTES)) { SegmentInfos segmentInfos = reader.getSegmentInfos().clone(); DirectoryReader wrappedReader = wrapReader(reader); List newInfos = new ArrayList<>(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshotRepository.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshotRepository.java index bb5819e1bda..b2fb833f34b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshotRepository.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshotRepository.java @@ -135,7 +135,7 @@ public final class SourceOnlySnapshotRepository extends FilterRepository { final long maxDoc = segmentInfos.totalMaxDoc(); tempStore.bootstrapNewHistory(maxDoc, maxDoc); store.incRef(); - try (DirectoryReader reader = DirectoryReader.open(tempStore.directory())) { + try (DirectoryReader reader = DirectoryReader.open(tempStore.directory(), ReadOnlyEngine.OFF_HEAP_READER_ATTRIBUTES)) { IndexCommit indexCommit = reader.getIndexCommit(); super.snapshotShard(tempStore, mapperService, snapshotId, indexId, indexCommit, snapshotStatus); } finally { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenEngineTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenEngineTests.java index 17019b0ac18..4cf21cfbecd 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenEngineTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenEngineTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.store.Store; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -146,12 +147,17 @@ public class FrozenEngineTests extends EngineTestCase { null, listener, null, globalCheckpoint::get, new HierarchyCircuitBreakerService(defaultSettings.getSettings(), new ClusterSettings(defaultSettings.getNodeSettings(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); CircuitBreaker breaker = config.getCircuitBreakerService().getBreaker(CircuitBreaker.ACCOUNTING); - long expectedUse; + final int docs; try (InternalEngine engine = createEngine(config)) { - addDocuments(globalCheckpoint, engine); + docs = addDocuments(globalCheckpoint, engine); 
engine.flush(false, true); // first flush to make sure we have a commit that we open in the frozen engine blow. engine.refresh("test"); // pull the reader to account for RAM in the breaker. + } + final long expectedUse; + try (ReadOnlyEngine readOnlyEngine = new ReadOnlyEngine(config, null, null, true, i -> i)) { expectedUse = breaker.getUsed(); + DocsStats docsStats = readOnlyEngine.docStats(); + assertEquals(docs, docsStats.getCount()); } assertTrue(expectedUse > 0); assertEquals(0, breaker.getUsed()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java index 9231bad9a8d..bbef530fe49 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java @@ -46,7 +46,6 @@ import java.io.IOException; import java.util.Collection; import java.util.EnumSet; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -64,7 +63,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { return pluginList(XPackPlugin.class); } - public void testCloseFreezeAndOpen() throws ExecutionException, InterruptedException { + public void testCloseFreezeAndOpen() { createIndex("index", Settings.builder().put("index.number_of_shards", 2).build()); client().prepareIndex("index", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); client().prepareIndex("index", "_doc", "2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); @@ -106,7 +105,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { } while (searchResponse.getHits().getHits().length > 0); } - public void testSearchAndGetAPIsAreThrottled() throws InterruptedException, IOException, ExecutionException { + public void testSearchAndGetAPIsAreThrottled() throws InterruptedException, IOException { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc") .startObject("properties").startObject("field").field("type", "text").field("term_vector", "with_positions_offsets_payloads") .endObject().endObject() @@ -150,7 +149,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertEquals(numRefreshes, index.getTotal().refresh.getTotal()); } - public void testFreezeAndUnfreeze() throws InterruptedException, ExecutionException { + public void testFreezeAndUnfreeze() { createIndex("index", Settings.builder().put("index.number_of_shards", 2).build()); client().prepareIndex("index", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); client().prepareIndex("index", "_doc", "2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); @@ -190,7 +189,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertTrue(FrozenEngine.INDEX_FROZEN.get(indexService.getIndexSettings().getSettings())); } - public void testDoubleFreeze() throws ExecutionException, InterruptedException { + public void testDoubleFreeze() { createIndex("test-idx", Settings.builder().put("index.number_of_shards", 2).build()); XPackClient xPackClient = new XPackClient(client()); assertAcked(xPackClient.freeze(new TransportFreezeIndexAction.FreezeRequest("test-idx"))); @@ -201,7 +200,7 @@ public class 
FrozenIndexTests extends ESSingleNodeTestCase { assertEquals("no index found to freeze", executionException.getCause().getMessage()); } - public void testUnfreezeClosedIndices() throws ExecutionException, InterruptedException { + public void testUnfreezeClosedIndices() { createIndex("idx", Settings.builder().put("index.number_of_shards", 1).build()); client().prepareIndex("idx", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); createIndex("idx-closed", Settings.builder().put("index.number_of_shards", 1).build()); @@ -217,7 +216,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertHitCount(client().prepareSearch().get(), 1L); } - public void testFreezePattern() throws ExecutionException, InterruptedException { + public void testFreezePattern() { createIndex("test-idx", Settings.builder().put("index.number_of_shards", 1).build()); client().prepareIndex("test-idx", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); createIndex("test-idx-1", Settings.builder().put("index.number_of_shards", 1).build()); @@ -241,7 +240,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertEquals(0, index.getTotal().refresh.getTotal()); } - public void testCanMatch() throws ExecutionException, InterruptedException, IOException { + public void testCanMatch() throws IOException { createIndex("index"); client().prepareIndex("index", "_doc", "1").setSource("field", "2010-01-05T02:00").setRefreshPolicy(IMMEDIATE).execute() .actionGet(); @@ -298,7 +297,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { } } - public void testWriteToFrozenIndex() throws ExecutionException, InterruptedException { + public void testWriteToFrozenIndex() { createIndex("idx", Settings.builder().put("index.number_of_shards", 1).build()); client().prepareIndex("idx", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); XPackClient xPackClient = new XPackClient(client()); @@ -308,7 +307,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { client().prepareIndex("idx", "_doc", "2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get()); } - public void testIgnoreUnavailable() throws ExecutionException, InterruptedException { + public void testIgnoreUnavailable() { createIndex("idx", Settings.builder().put("index.number_of_shards", 1).build()); createIndex("idx-close", Settings.builder().put("index.number_of_shards", 1).build()); assertAcked(client().admin().indices().prepareClose("idx-close")); @@ -320,7 +319,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { client().admin().cluster().prepareState().get().getState().metaData().index("idx-close").getState()); } - public void testUnfreezeClosedIndex() throws ExecutionException, InterruptedException { + public void testUnfreezeClosedIndex() { createIndex("idx", Settings.builder().put("index.number_of_shards", 1).build()); XPackClient xPackClient = new XPackClient(client()); assertAcked(xPackClient.freeze(new TransportFreezeIndexAction.FreezeRequest("idx"))); @@ -337,7 +336,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { client().admin().cluster().prepareState().get().getState().metaData().index("idx").getState()); } - public void testFreezeIndexIncreasesIndexSettingsVersion() throws ExecutionException, InterruptedException { + public void testFreezeIndexIncreasesIndexSettingsVersion() { final String index = "test"; createIndex(index, Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 
0).build()); client().prepareIndex(index, "_doc").setSource("field", "value").execute().actionGet(); @@ -378,7 +377,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertIndexFrozen(indexName); } - public void testRecoveryState() throws ExecutionException, InterruptedException { + public void testRecoveryState() { final String indexName = "index_recovery_state"; createIndex(indexName, Settings.builder() .put("index.number_of_replicas", 0) From de0ea4bbf77235fd25e9a340b648be981b9218f7 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Wed, 5 Jun 2019 08:47:33 +0200 Subject: [PATCH 068/210] Deduplicate alias and concrete fields in query field expansion (#42328) The full-text query parsers accept field pattern that are expanded using the mapping. Alias field are also detected during the expansion but they are not deduplicated with the concrete fields that are found from other patterns (or the same). This change ensures that we deduplicate the target fields of the full-text query parsers in order to avoid adding the same clause multiple times. Boolean queries are already able to deduplicate clauses during rewrite but since we also use DisjunctionMaxQuery it is preferable to detect these duplicates early on. --- .../index/search/QueryParserHelper.java | 19 ++++++++++++-- .../query/MultiMatchQueryBuilderTests.java | 26 +++---------------- .../query/QueryStringQueryBuilderTests.java | 19 +++++++++++--- .../query/SimpleQueryStringBuilderTests.java | 4 +-- .../index/search/MultiMatchQueryTests.java | 4 +-- 5 files changed, 40 insertions(+), 32 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java index 3acf2929687..fafe515ec09 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java @@ -55,6 +55,10 @@ public final class QueryParserHelper { } else { fieldName = field; } + // handle duplicates + if (fieldsAndWeights.containsKey(field)) { + boost *= fieldsAndWeights.get(field); + } fieldsAndWeights.put(fieldName, boost); } return fieldsAndWeights; @@ -84,7 +88,13 @@ public final class QueryParserHelper { float weight = fieldEntry.getValue() == null ? 
1.0f : fieldEntry.getValue(); Map fieldMap = resolveMappingField(context, fieldEntry.getKey(), weight, !multiField, !allField, fieldSuffix); - resolvedFields.putAll(fieldMap); + for (Map.Entry field : fieldMap.entrySet()) { + float boost = field.getValue(); + if (resolvedFields.containsKey(field.getKey())) { + boost *= resolvedFields.get(field.getKey()); + } + resolvedFields.put(field.getKey(), boost); + } } checkForTooManyFields(resolvedFields, context); return resolvedFields; @@ -149,7 +159,12 @@ public final class QueryParserHelper { // other exceptions are parsing errors or not indexed fields: keep } } - fields.put(fieldName, weight); + // handle duplicates + float w = weight; + if (fields.containsKey(fieldType.name())) { + w *= fields.get(fieldType.name()); + } + fields.put(fieldType.name(), w); } checkForTooManyFields(fields, context); return fields; diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 970a4c3a37e..452c670cea8 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -241,10 +241,9 @@ public class MultiMatchQueryBuilderTests extends FullTextQueryTestCase terms = new ArrayList<>(); + terms.add(new BoostQuery(new TermQuery(new Term(STRING_FIELD_NAME, "first")), 0.075f)); + terms.add(new BoostQuery(new TermQuery(new Term(STRING_FIELD_NAME_2, "first")), 0.5f)); + Query expected = new DisjunctionMaxQuery(terms, 1.0f); + assertEquals(expected, query); + } + private static IndexMetaData newIndexMeta(String name, Settings oldIndexSettings, Settings indexSettings) { Settings build = Settings.builder().put(oldIndexSettings) .put(indexSettings) @@ -1557,7 +1568,7 @@ public class QueryStringQueryBuilderTests extends FullTextQueryTestCase fields = new HashMap<>(); for (int i = 0; i < fieldCount; i++) { - if (randomBoolean()) { + if (i == 0) { String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME); fields.put(fieldName, AbstractQueryBuilder.DEFAULT_BOOST); } else { @@ -786,7 +786,7 @@ public class SimpleQueryStringBuilderTests extends FullTextQueryTestCase Date: Wed, 5 Jun 2019 11:24:58 +0200 Subject: [PATCH 069/210] Remove post Java 9 API usage after backport --- .../org/elasticsearch/index/engine/InternalEngine.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 59d6b48dcc8..2c4ee784f62 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -95,6 +95,7 @@ import java.io.IOException; import java.nio.file.Path; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -2151,11 +2152,12 @@ public class InternalEngine extends Engine { static Map getReaderAttributes(Directory directory) { Directory unwrap = FilterDirectory.unwrap(directory); boolean defaultOffHeap = FsDirectoryFactory.isHybridFs(unwrap) || unwrap instanceof MMapDirectory; - return Map.of( - BlockTreeTermsReader.FST_MODE_KEY, // if we are using MMAP for term dics we force all off heap unless it's the ID field - defaultOffHeap ? 
FSTLoadMode.OFF_HEAP.name() : FSTLoadMode.ON_HEAP.name() - , BlockTreeTermsReader.FST_MODE_KEY + "." + IdFieldMapper.NAME, // always force ID field on-heap for fast updates + HashMap map = new HashMap(2); + map.put(BlockTreeTermsReader.FST_MODE_KEY, // if we are using MMAP for term dics we force all off heap unless it's the ID field + defaultOffHeap ? FSTLoadMode.OFF_HEAP.name() : FSTLoadMode.ON_HEAP.name()); + map.put(BlockTreeTermsReader.FST_MODE_KEY + "." + IdFieldMapper.NAME, // always force ID field on-heap for fast updates FSTLoadMode.ON_HEAP.name()); + return Collections.unmodifiableMap(map); } private IndexWriterConfig getIndexWriterConfig() { From d3524fdd0633d4b1f979429ca768d46b64a642de Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 5 Jun 2019 11:25:19 +0200 Subject: [PATCH 070/210] Add back import after backport --- server/src/main/java/org/elasticsearch/index/store/Store.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 87a822c9596..e9430ea7aa0 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -96,6 +96,7 @@ import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantReadWriteLock; From ebec118ccf66e4d122558a1ff0231a9627e08f6a Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 5 Jun 2019 12:10:02 +0200 Subject: [PATCH 071/210] Bring back ExecutionException after backport --- .../index/engine/FrozenIndexTests.java | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java index bbef530fe49..460b075c22b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java @@ -46,6 +46,7 @@ import java.io.IOException; import java.util.Collection; import java.util.EnumSet; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -63,7 +64,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { return pluginList(XPackPlugin.class); } - public void testCloseFreezeAndOpen() { + public void testCloseFreezeAndOpen() throws ExecutionException, InterruptedException { createIndex("index", Settings.builder().put("index.number_of_shards", 2).build()); client().prepareIndex("index", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); client().prepareIndex("index", "_doc", "2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); @@ -105,7 +106,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { } while (searchResponse.getHits().getHits().length > 0); } - public void testSearchAndGetAPIsAreThrottled() throws InterruptedException, IOException { + public void testSearchAndGetAPIsAreThrottled() throws InterruptedException, IOException, ExecutionException { XContentBuilder mapping = 
XContentFactory.jsonBuilder().startObject().startObject("_doc") .startObject("properties").startObject("field").field("type", "text").field("term_vector", "with_positions_offsets_payloads") .endObject().endObject() @@ -149,7 +150,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertEquals(numRefreshes, index.getTotal().refresh.getTotal()); } - public void testFreezeAndUnfreeze() { + public void testFreezeAndUnfreeze() throws ExecutionException, InterruptedException { createIndex("index", Settings.builder().put("index.number_of_shards", 2).build()); client().prepareIndex("index", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); client().prepareIndex("index", "_doc", "2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); @@ -189,7 +190,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertTrue(FrozenEngine.INDEX_FROZEN.get(indexService.getIndexSettings().getSettings())); } - public void testDoubleFreeze() { + public void testDoubleFreeze() throws ExecutionException, InterruptedException { createIndex("test-idx", Settings.builder().put("index.number_of_shards", 2).build()); XPackClient xPackClient = new XPackClient(client()); assertAcked(xPackClient.freeze(new TransportFreezeIndexAction.FreezeRequest("test-idx"))); @@ -200,7 +201,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertEquals("no index found to freeze", executionException.getCause().getMessage()); } - public void testUnfreezeClosedIndices() { + public void testUnfreezeClosedIndices() throws ExecutionException, InterruptedException { createIndex("idx", Settings.builder().put("index.number_of_shards", 1).build()); client().prepareIndex("idx", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); createIndex("idx-closed", Settings.builder().put("index.number_of_shards", 1).build()); @@ -216,7 +217,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertHitCount(client().prepareSearch().get(), 1L); } - public void testFreezePattern() { + public void testFreezePattern() throws ExecutionException, InterruptedException { createIndex("test-idx", Settings.builder().put("index.number_of_shards", 1).build()); client().prepareIndex("test-idx", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); createIndex("test-idx-1", Settings.builder().put("index.number_of_shards", 1).build()); @@ -240,7 +241,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertEquals(0, index.getTotal().refresh.getTotal()); } - public void testCanMatch() throws IOException { + public void testCanMatch() throws IOException, ExecutionException, InterruptedException { createIndex("index"); client().prepareIndex("index", "_doc", "1").setSource("field", "2010-01-05T02:00").setRefreshPolicy(IMMEDIATE).execute() .actionGet(); @@ -297,7 +298,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { } } - public void testWriteToFrozenIndex() { + public void testWriteToFrozenIndex() throws ExecutionException, InterruptedException { createIndex("idx", Settings.builder().put("index.number_of_shards", 1).build()); client().prepareIndex("idx", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); XPackClient xPackClient = new XPackClient(client()); @@ -307,7 +308,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { client().prepareIndex("idx", "_doc", "2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get()); } - public void testIgnoreUnavailable() { + public 
void testIgnoreUnavailable() throws ExecutionException, InterruptedException { createIndex("idx", Settings.builder().put("index.number_of_shards", 1).build()); createIndex("idx-close", Settings.builder().put("index.number_of_shards", 1).build()); assertAcked(client().admin().indices().prepareClose("idx-close")); @@ -319,7 +320,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { client().admin().cluster().prepareState().get().getState().metaData().index("idx-close").getState()); } - public void testUnfreezeClosedIndex() { + public void testUnfreezeClosedIndex() throws ExecutionException, InterruptedException { createIndex("idx", Settings.builder().put("index.number_of_shards", 1).build()); XPackClient xPackClient = new XPackClient(client()); assertAcked(xPackClient.freeze(new TransportFreezeIndexAction.FreezeRequest("idx"))); @@ -336,7 +337,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { client().admin().cluster().prepareState().get().getState().metaData().index("idx").getState()); } - public void testFreezeIndexIncreasesIndexSettingsVersion() { + public void testFreezeIndexIncreasesIndexSettingsVersion() throws ExecutionException, InterruptedException { final String index = "test"; createIndex(index, Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0).build()); client().prepareIndex(index, "_doc").setSource("field", "value").execute().actionGet(); @@ -377,7 +378,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertIndexFrozen(indexName); } - public void testRecoveryState() { + public void testRecoveryState() throws ExecutionException, InterruptedException { final String indexName = "index_recovery_state"; createIndex(indexName, Settings.builder() .put("index.number_of_replicas", 0) From d5baedb78963909c3f720373424dadc21267488a Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 5 Jun 2019 10:52:25 +0100 Subject: [PATCH 072/210] [ML] Change dots in CSV column names to underscores (#42839) Dots in the column names cause an error in the ingest pipeline, as dots are special characters in ingest pipeline. This PR changes dots into underscores in CSV field names suggested by the ML find_file_structure endpoint _unless_ the field names are specifically overridden. The reason for allowing them in overrides is that fields that are not mentioned in the ingest pipeline can contain dots. But it's more consistent that the default behaviour is to replace them all. Fixes elastic/kibana#26800 --- .../DelimitedFileStructureFinder.java | 4 +-- .../DelimitedFileStructureFinderTests.java | 33 +++++++++++++++++++ 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java index aa889059626..46e4027380a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java @@ -62,12 +62,12 @@ public class DelimitedFileStructureFinder implements FileStructureFinder { } columnNames = overriddenColumnNames.toArray(new String[0]); } else { - // The column names are the header names but with blanks named column1, column2, etc. 
+ // The column names are the header names but with dots replaced with underscores and blanks named column1, column2, etc. columnNames = new String[header.length]; for (int i = 0; i < header.length; ++i) { assert header[i] != null; String rawHeader = trimFields ? header[i].trim() : header[i]; - columnNames[i] = rawHeader.isEmpty() ? "column" + (i + 1) : rawHeader; + columnNames[i] = rawHeader.isEmpty() ? "column" + (i + 1) : rawHeader.replace('.', '_'); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java index 7b157555eef..4a7c5b87d21 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java @@ -364,6 +364,39 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss.SSSSSS"), structure.getJodaTimestampFormats()); } + public void testCreateConfigsGivenDotInFieldName() throws Exception { + String sample = "time.iso8601,message\n" + + "2018-05-17T13:41:23,hello\n" + + "2018-05-17T13:41:32,hello again\n"; + assertTrue(csvFactory.canCreateFromSample(explanation, sample)); + + String charset = randomFrom(POSSIBLE_CHARSETS); + Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, + FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); + + FileStructure structure = structureFinder.getStructure(); + + assertEquals(FileStructure.Format.DELIMITED, structure.getFormat()); + assertEquals(charset, structure.getCharset()); + if (hasByteOrderMarker == null) { + assertNull(structure.getHasByteOrderMarker()); + } else { + assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker()); + } + // The exclude pattern needs to work on the raw text, so reflects the unmodified field names + assertEquals("^\"?time\\.iso8601\"?,\"?message\"?", structure.getExcludeLinesPattern()); + assertEquals("^\"?\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}", structure.getMultilineStartPattern()); + assertEquals(Character.valueOf(','), structure.getDelimiter()); + assertEquals(Character.valueOf('"'), structure.getQuote()); + assertTrue(structure.getHasHeaderRow()); + assertNull(structure.getShouldTrimFields()); + assertEquals(Arrays.asList("time_iso8601", "message"), structure.getColumnNames()); + assertNull(structure.getGrokPattern()); + assertEquals("time_iso8601", structure.getTimestampField()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); + } + public void testFindHeaderFromSampleGivenHeaderInSample() throws IOException { String withHeader = "time,airline,responsetime,sourcetype\n" + "2014-06-23 00:00:00Z,AAL,132.2046,farequote\n" + From 293f306b9add8e9afe7447f4e298f018afa2294f Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 5 Jun 2019 07:09:34 -0500 Subject: [PATCH 073/210] [ML][Data Frame] forcing that no ptask => STOPPED state (#42800) (#42860) * [ML][Data Frame] forcing that no ptask => STOPPED state * Addressing side-effect, early exit for stop when stopped --- .../core/indexing/AsyncTwoPhaseIndexer.java | 
2 ++ ...portGetDataFrameTransformsStatsAction.java | 19 ++++++++++++++++++- .../transforms/DataFrameTransformTask.java | 4 ++++ .../test/data_frame/transforms_start_stop.yml | 3 --- 4 files changed, 24 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java index f9bbf890fe6..efe57f44e89 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java @@ -284,6 +284,8 @@ public abstract class AsyncTwoPhaseIndexer { + callOnAbort.set(false); + callOnStop.set(false); switch (prev) { case INDEXING: // ready for another job diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java index df2d09a875d..d814714ab66 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStats import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction.Request; import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction.Response; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpointingInfo; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState; +import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.dataframe.checkpoint.DataFrameTransformsCheckpointService; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformTask; @@ -136,7 +139,21 @@ public class TransportGetDataFrameTransformsStatsAction extends ActionListener> searchStatsListener = ActionListener.wrap( stats -> { List allStateAndStats = response.getTransformsStateAndStats(); - allStateAndStats.addAll(stats); + // If the persistent task does NOT exist, it is STOPPED + // There is a potential race condition where the saved document does not actually have a STOPPED state + // as the task is cancelled before we persist state. 
+ stats.forEach(stat -> + allStateAndStats.add(new DataFrameTransformStateAndStats( + stat.getId(), + new DataFrameTransformState(DataFrameTransformTaskState.STOPPED, + IndexerState.STOPPED, + stat.getTransformState().getPosition(), + stat.getTransformState().getCheckpoint(), + stat.getTransformState().getReason(), + stat.getTransformState().getProgress()), + stat.getTransformStats(), + stat.getCheckpointingInfo())) + ); transformsWithoutTasks.removeAll( stats.stream().map(DataFrameTransformStateAndStats::getId).collect(Collectors.toSet())); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java index 20ef5be09e8..28876ef2c4d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java @@ -237,6 +237,10 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S return; } + if (getIndexer().getState() == IndexerState.STOPPED) { + return; + } + IndexerState state = getIndexer().stop(); if (state == IndexerState.STOPPED) { getIndexer().doSaveState(state, getIndexer().getPosition(), () -> getIndexer().onStop()); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml index 2686c57fd06..d156344b5ad 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml @@ -90,9 +90,6 @@ teardown: - match: { airline-data-by-airline-start-stop.mappings: {} } --- "Test start/stop/start transform": - - skip: - reason: "https://github.com/elastic/elasticsearch/issues/42650" - version: "all" - do: data_frame.start_data_frame_transform: transform_id: "airline-transform-start-stop" From 0a9901925978d5a1c5bd8ebe01b7222e16920454 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 5 Jun 2019 09:47:06 -0400 Subject: [PATCH 074/210] Disable building on JDK 13 in CI This commit disables building on JDK 13 in CI. The reason for this is because Gradle is not yet ready to run on JDK 13. We could re-introduce infrastructure to enable Gralde to run on a different JDK than the build JDK, but rather than introducing such complexity we will instead wait for Gradle to be ready to run on JDK 13. --- .ci/matrix-build-javas.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.ci/matrix-build-javas.yml b/.ci/matrix-build-javas.yml index 85813a01a9d..202fd60edea 100644 --- a/.ci/matrix-build-javas.yml +++ b/.ci/matrix-build-javas.yml @@ -7,4 +7,3 @@ ES_BUILD_JAVA: - openjdk12 - - openjdk13 From cfdb1b771ebf9b006e5a9eb4f7e52e8d47b5d70a Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Wed, 5 Jun 2019 17:15:37 +0200 Subject: [PATCH 075/210] Enable console audit logs for docker backport#42671 #42887 Enable audit logs in docker by creating console appenders for audit loggers. also rename field @timestamp to timestamp and add field type with value audit The docker build contains now two log4j configuration for oss or default versions. The build now allows override the default configuration. 
Also changed the format of a timestamp from ISO8601 to include time zone as per this discussion #36833 (comment) closes #42666 backport#42671 --- distribution/docker/build.gradle | 5 + .../src/docker/config/log4j2.properties | 121 ++++++++++++++++++ .../src/docker/config/log4j2.properties | 0 .../core/src/main/config/log4j2.properties | 3 +- .../audit/logfile/LoggingAuditTrail.java | 2 + .../audit/logfile/LoggingAuditTrailTests.java | 6 +- 6 files changed, 134 insertions(+), 3 deletions(-) create mode 100644 distribution/docker/docker-build-context/src/docker/config/log4j2.properties rename distribution/docker/{ => oss-docker-build-context}/src/docker/config/log4j2.properties (100%) diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index ab55c737783..e9901251375 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -44,7 +44,12 @@ project.ext { } into('config') { + /* + * Oss and default distribution can have different configuration, therefore we want to allow overriding the default configuration + * by creating config files in oss or default build-context sub-modules. + */ from project.projectDir.toPath().resolve("src/docker/config") + from project.projectDir.toPath().resolve(oss ? "oss-docker-build-context" : "docker-build-context").resolve("src/docker/config") } from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) { diff --git a/distribution/docker/docker-build-context/src/docker/config/log4j2.properties b/distribution/docker/docker-build-context/src/docker/config/log4j2.properties new file mode 100644 index 00000000000..40be55d2e1c --- /dev/null +++ b/distribution/docker/docker-build-context/src/docker/config/log4j2.properties @@ -0,0 +1,121 @@ +status = error + +# log action execution errors for easier debugging +logger.action.name = org.elasticsearch.action +logger.action.level = debug + +appender.rolling.type = Console +appender.rolling.name = rolling +appender.rolling.layout.type = ESJsonLayout +appender.rolling.layout.type_name = server + +rootLogger.level = info +rootLogger.appenderRef.rolling.ref = rolling + +appender.deprecation_rolling.type = Console +appender.deprecation_rolling.name = deprecation_rolling +appender.deprecation_rolling.layout.type = ESJsonLayout +appender.deprecation_rolling.layout.type_name = deprecation + +logger.deprecation.name = org.elasticsearch.deprecation +logger.deprecation.level = warn +logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling +logger.deprecation.additivity = false + +appender.index_search_slowlog_rolling.type = Console +appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling +appender.index_search_slowlog_rolling.layout.type = ESJsonLayout +appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog + +logger.index_search_slowlog_rolling.name = index.search.slowlog +logger.index_search_slowlog_rolling.level = trace +logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling.ref = index_search_slowlog_rolling +logger.index_search_slowlog_rolling.additivity = false + +appender.index_indexing_slowlog_rolling.type = Console +appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling +appender.index_indexing_slowlog_rolling.layout.type = ESJsonLayout +appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog + +logger.index_indexing_slowlog.name = index.indexing.slowlog.index +logger.index_indexing_slowlog.level = trace 
+logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling +logger.index_indexing_slowlog.additivity = false + +appender.audit_rolling.type = Console +appender.audit_rolling.name = audit_rolling +appender.audit_rolling.layout.type = PatternLayout +appender.audit_rolling.layout.pattern = {\ +"type": "audit", \ +"timestamp":"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZ}"\ +%varsNotEmpty{, "node.name":"%enc{%map{node.name}}{JSON}"}\ +%varsNotEmpty{, "node.id":"%enc{%map{node.id}}{JSON}"}\ +%varsNotEmpty{, "host.name":"%enc{%map{host.name}}{JSON}"}\ +%varsNotEmpty{, "host.ip":"%enc{%map{host.ip}}{JSON}"}\ +%varsNotEmpty{, "event.type":"%enc{%map{event.type}}{JSON}"}\ +%varsNotEmpty{, "event.action":"%enc{%map{event.action}}{JSON}"}\ +%varsNotEmpty{, "user.name":"%enc{%map{user.name}}{JSON}"}\ +%varsNotEmpty{, "user.run_by.name":"%enc{%map{user.run_by.name}}{JSON}"}\ +%varsNotEmpty{, "user.run_as.name":"%enc{%map{user.run_as.name}}{JSON}"}\ +%varsNotEmpty{, "user.realm":"%enc{%map{user.realm}}{JSON}"}\ +%varsNotEmpty{, "user.run_by.realm":"%enc{%map{user.run_by.realm}}{JSON}"}\ +%varsNotEmpty{, "user.run_as.realm":"%enc{%map{user.run_as.realm}}{JSON}"}\ +%varsNotEmpty{, "user.roles":%map{user.roles}}\ +%varsNotEmpty{, "origin.type":"%enc{%map{origin.type}}{JSON}"}\ +%varsNotEmpty{, "origin.address":"%enc{%map{origin.address}}{JSON}"}\ +%varsNotEmpty{, "realm":"%enc{%map{realm}}{JSON}"}\ +%varsNotEmpty{, "url.path":"%enc{%map{url.path}}{JSON}"}\ +%varsNotEmpty{, "url.query":"%enc{%map{url.query}}{JSON}"}\ +%varsNotEmpty{, "request.method":"%enc{%map{request.method}}{JSON}"}\ +%varsNotEmpty{, "request.body":"%enc{%map{request.body}}{JSON}"}\ +%varsNotEmpty{, "request.id":"%enc{%map{request.id}}{JSON}"}\ +%varsNotEmpty{, "action":"%enc{%map{action}}{JSON}"}\ +%varsNotEmpty{, "request.name":"%enc{%map{request.name}}{JSON}"}\ +%varsNotEmpty{, "indices":%map{indices}}\ +%varsNotEmpty{, "opaque_id":"%enc{%map{opaque_id}}{JSON}"}\ +%varsNotEmpty{, "x_forwarded_for":"%enc{%map{x_forwarded_for}}{JSON}"}\ +%varsNotEmpty{, "transport.profile":"%enc{%map{transport.profile}}{JSON}"}\ +%varsNotEmpty{, "rule":"%enc{%map{rule}}{JSON}"}\ +%varsNotEmpty{, "event.category":"%enc{%map{event.category}}{JSON}"}\ +}%n +# "node.name" node name from the `elasticsearch.yml` settings +# "node.id" node id which should not change between cluster restarts +# "host.name" unresolved hostname of the local node +# "host.ip" the local bound ip (i.e. the ip listening for connections) +# "event.type" a received REST request is translated into one or more transport requests. This indicates which processing layer generated the event "rest" or "transport" (internal) +# "event.action" the name of the audited event, eg. "authentication_failed", "access_granted", "run_as_granted", etc. +# "user.name" the subject name as authenticated by a realm +# "user.run_by.name" the original authenticated subject name that is impersonating another one. +# "user.run_as.name" if this "event.action" is of a run_as type, this is the subject name to be impersonated as. 
+# "user.realm" the name of the realm that authenticated "user.name" +# "user.run_by.realm" the realm name of the impersonating subject ("user.run_by.name") +# "user.run_as.realm" if this "event.action" is of a run_as type, this is the realm name the impersonated user is looked up from +# "user.roles" the roles array of the user; these are the roles that are granting privileges +# "origin.type" it is "rest" if the event is originating (is in relation to) a REST request; possible other values are "transport" and "ip_filter" +# "origin.address" the remote address and port of the first network hop, i.e. a REST proxy or another cluster node +# "realm" name of a realm that has generated an "authentication_failed" or an "authentication_successful"; the subject is not yet authenticated +# "url.path" the URI component between the port and the query string; it is percent (URL) encoded +# "url.query" the URI component after the path and before the fragment; it is percent (URL) encoded +# "request.method" the method of the HTTP request, i.e. one of GET, POST, PUT, DELETE, OPTIONS, HEAD, PATCH, TRACE, CONNECT +# "request.body" the content of the request body entity, JSON escaped +# "request.id" a synthentic identifier for the incoming request, this is unique per incoming request, and consistent across all audit events generated by that request +# "action" an action is the most granular operation that is authorized and this identifies it in a namespaced way (internal) +# "request.name" if the event is in connection to a transport message this is the name of the request class, similar to how rest requests are identified by the url path (internal) +# "indices" the array of indices that the "action" is acting upon +# "opaque_id" opaque value conveyed by the "X-Opaque-Id" request header +# "x_forwarded_for" the addresses from the "X-Forwarded-For" request header, as a verbatim string value (not an array) +# "transport.profile" name of the transport profile in case this is a "connection_granted" or "connection_denied" event +# "rule" name of the applied rulee if the "origin.type" is "ip_filter" +# "event.category" fixed value "elasticsearch-audit" + +logger.xpack_security_audit_logfile.name = org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail +logger.xpack_security_audit_logfile.level = info +logger.xpack_security_audit_logfile.appenderRef.audit_rolling.ref = audit_rolling +logger.xpack_security_audit_logfile.additivity = false + +logger.xmlsig.name = org.apache.xml.security.signature.XMLSignature +logger.xmlsig.level = error +logger.samlxml_decrypt.name = org.opensaml.xmlsec.encryption.support.Decrypter +logger.samlxml_decrypt.level = fatal +logger.saml2_decrypt.name = org.opensaml.saml.saml2.encryption.Decrypter +logger.saml2_decrypt.level = fatal diff --git a/distribution/docker/src/docker/config/log4j2.properties b/distribution/docker/oss-docker-build-context/src/docker/config/log4j2.properties similarity index 100% rename from distribution/docker/src/docker/config/log4j2.properties rename to distribution/docker/oss-docker-build-context/src/docker/config/log4j2.properties diff --git a/x-pack/plugin/core/src/main/config/log4j2.properties b/x-pack/plugin/core/src/main/config/log4j2.properties index 2b7e112eb14..c37faf84afb 100644 --- a/x-pack/plugin/core/src/main/config/log4j2.properties +++ b/x-pack/plugin/core/src/main/config/log4j2.properties @@ -3,7 +3,8 @@ appender.audit_rolling.name = audit_rolling appender.audit_rolling.fileName = 
${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_audit.json appender.audit_rolling.layout.type = PatternLayout appender.audit_rolling.layout.pattern = {\ - "@timestamp":"%d{ISO8601}"\ + "type":"audit", \ + "timestamp":"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZ}"\ %varsNotEmpty{, "node.name":"%enc{%map{node.name}}{JSON}"}\ %varsNotEmpty{, "node.id":"%enc{%map{node.id}}{JSON}"}\ %varsNotEmpty{, "host.name":"%enc{%map{host.name}}{JSON}"}\ diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index f4e59f8c4db..b7ccdeac686 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -85,6 +85,8 @@ public class LoggingAuditTrail implements AuditTrail, ClusterStateListener { public static final String IP_FILTER_ORIGIN_FIELD_VALUE = "ip_filter"; // changing any of this names requires changing the log4j2.properties file too + public static final String LOG_TYPE = "type"; + public static final String TIMESTAMP = "timestamp"; public static final String ORIGIN_TYPE_FIELD_NAME = "origin.type"; public static final String ORIGIN_ADDRESS_FIELD_NAME = "origin.address"; public static final String NODE_NAME_FIELD_NAME = "node.name"; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 55d5bd579c1..c236c605b30 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -607,7 +607,7 @@ public class LoggingAuditTrailTests extends ESTestCase { indicesRequest(message, checkedFields, checkedArrayFields); opaqueId(threadContext, checkedFields); forwardedFor(threadContext, checkedFields); - + assertMsg(logger, checkedFields.immutableMap(), checkedArrayFields.immutableMap()); // test disabled @@ -1063,7 +1063,9 @@ public class LoggingAuditTrailTests extends ESTestCase { logLine = logEntryFieldPattern.matcher(logLine).replaceFirst(""); } } - logLine = logLine.replaceFirst("\"@timestamp\":\"[^\"]*\"", "").replaceAll("[{},]", ""); + logLine = logLine.replaceFirst("\"" + LoggingAuditTrail.LOG_TYPE + "\":\"audit\", ", "") + .replaceFirst("\"" + LoggingAuditTrail.TIMESTAMP + "\":\"[^\"]*\"", "") + .replaceAll("[{},]", ""); // check no extra fields assertThat("Log event has extra unexpected content: " + logLine, Strings.hasText(logLine), is(false)); } From 84eab4eba12308e90e70b215ab789c2bd3ce6377 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 5 Jun 2019 10:08:30 -0700 Subject: [PATCH 076/210] Omit JDK sources archive from bundled JDK (#42821) (cherry picked from commit 71d1454fe5ecc222801731a5f0e0e1053dc8997e) --- distribution/build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/distribution/build.gradle b/distribution/build.gradle index 940cbd0369a..65c7cbadb50 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -378,6 +378,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { if (details.relativePath.segments[-2] == 'bin' 
|| details.relativePath.segments[-1] == 'jspawnhelper') { details.mode = 0755 } + if (details.name == 'src.zip') { + details.exclude() + } } } } From 790d2124f62984a6f31e89b4d1a6ca9899a57b83 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 5 Jun 2019 10:20:46 -0700 Subject: [PATCH 077/210] Clean Up Painless Datetime Docs (#42869) This change abstracts the specific types away from the different representations of datetime as a datetime representation in code can be all kinds of different things. This defines the three most common types of datetimes as numeric, string, and complex while outlining the type most typically used for these as long, String, and ZonedDateTime, respectively. Documentation uses the definitions while examples use the types. This makes the documentation easier to consume especially for people from a non-Java background. --- .../painless-guide/painless-datetime.asciidoc | 142 ++++++++++-------- 1 file changed, 79 insertions(+), 63 deletions(-) diff --git a/docs/painless/painless-guide/painless-datetime.asciidoc b/docs/painless/painless-guide/painless-datetime.asciidoc index ef8ef8fd8c8..11f38475244 100644 --- a/docs/painless/painless-guide/painless-datetime.asciidoc +++ b/docs/painless/painless-guide/painless-datetime.asciidoc @@ -15,37 +15,38 @@ from the following Java packages are available to use in Painless scripts: ==== Datetime Representation -Datetimes in Painless are most commonly represented as a -<>, a <>, or a -<>. +Datetimes in Painless are most commonly represented as a numeric value, a +string value, or a complex value. -long:: represents a datetime as the number of milliseconds or nanoseconds since -epoch (1970-01-01T00:00:00Z) -String:: represents a datetime as a sequence of characters defined by a -well-known standard such as https://en.wikipedia.org/wiki/ISO_8601[ISO 8601] or -defined by the source of input in a custom way -ZonedDateTime:: a <> (object) that contains an -internal representation of a datetime and provides numerous -<> for -modification and comparison. +numeric:: a datetime representation as a number from a starting offset called +an epoch; in Painless this is typically a <> as +milliseconds since an epoch of 1970-01-01 00:00:00 Zulu Time +string:: a datetime representation as a sequence of characters defined by +a standard format or a custom format; in Painless this is typically a +<> of the standard format +https://en.wikipedia.org/wiki/ISO_8601[ISO 8601] +complex:: a datetime representation as a complex type +(<>) that abstracts away internal details of how the +datetime is stored and often provides utilities for modification and +comparison; in Painless this is typically a +<> Switching between different representations of datetimes is often necessary to achieve a script's objective(s). A typical pattern in a script is to switch a -long or String representation of a datetime to a ZonedDateTime representation, -modify or compare the ZonedDateTime representation, and then switch it back to -a long or String representation for storage or as a returned result. +numeric or string datetime to a complex datetime, modify or compare the complex +datetime, and then switch it back to a numeric or string datetime for storage +or to return a result. ==== Datetime Parsing and Formatting -Datetime parsing is a switch from a String representation to a ZonedDateTime -representation, and datetime formatting is a switch from a ZonedDateTime -representation to a String representation. 
+Datetime parsing is a switch from a string datetime to a complex datetime, and +datetime formatting is a switch from a complex datetime to a string datetime. A <> is a -<> (object) that defines the allowed sequence -of characters for a String representation of a datetime. Datetime parsing and -formatting often requires a DateTimeFormatter. For more information about how -to use a DateTimeFormatter see the +complex type (<>) that defines the allowed sequence +of characters for a string datetime. Datetime parsing and formatting often +requires a DateTimeFormatter. For more information about how to use a +DateTimeFormatter see the {java11-javadoc}/java.base/java/time/format/DateTimeFormatter.html[Java documentation]. ===== Datetime Parsing Examples @@ -92,7 +93,7 @@ Note the use of a custom DateTimeFormatter. ===== Datetime Formatting Examples -* format to a String (ISO 8601) +* format to ISO 8601 + [source,Painless] ---- @@ -102,7 +103,7 @@ String datetime = zdt.format(DateTimeFormatter.ISO_INSTANT); ---- Note the use of a built-in DateTimeFormatter. + -* format to a String (custom) +* format to a custom format + [source,Painless] ---- @@ -116,8 +117,8 @@ Note the use of a custom DateTimeFormatter. ==== Datetime Conversion -Datetime conversion is a switch from a long representation to a ZonedDateTime -representation and vice versa. +Datetime conversion is a switch from a numeric datetime to a complex datetime +and vice versa. ===== Datetime Conversion Examples @@ -141,16 +142,14 @@ long milliSinceEpoch = zdt.toInstant().toEpochMilli(); ==== Datetime Pieces -Use the ZonedDateTime -<> to create a new -ZonedDateTime from pieces (year, month, day, hour, minute, second, nano, -time zone). Use ZonedDateTime -<> to extract pieces from -a ZonedDateTime. +Datetime representations often contain the data to extract individual datetime +pieces such as year, hour, timezone, etc. Use individual pieces of a datetime +to create a complex datetime, and use a complex datetime to extract individual +pieces. ===== Datetime Pieces Examples -* create a ZonedDateTime from pieces +* create a complex datetime from pieces + [source,Painless] ---- @@ -165,7 +164,7 @@ ZonedDateTime zdt = ZonedDateTime.of( year, month, day, hour, minutes, seconds, nanos, ZoneId.of('Z')); ---- + -* extract pieces from a ZonedDateTime +* extract pieces from a complex datetime + [source,Painless] ---- @@ -182,17 +181,18 @@ int nanos = zdt.getNano(); ==== Datetime Modification -Use either a long or a ZonedDateTime to do datetime modification such as adding -several seconds to a datetime or subtracting several days from a datetime. Use -standard <> to modify a long -representation of a datetime. Use ZonedDateTime -<> to modify a -ZonedDateTime representation of a datetime. Note most modification methods for -a ZonedDateTime return a new instance for assignment or immediate use. +Use either a numeric datetime or a complex datetime to do modification such as +adding several seconds to a datetime or subtracting several days from a +datetime. Use standard <> to +modify a numeric datetime. Use +<> (or fields) to modify +a complex datetime. Note many complex datetimes are immutable so upon +modification a new complex datetime is created that requires +<> or immediate use. 
===== Datetime Modification Examples -* Subtract three seconds from milliseconds +* Subtract three seconds from a numeric datetime in milliseconds + [source,Painless] ---- @@ -200,7 +200,7 @@ long milliSinceEpoch = 434931330000L; milliSinceEpoch = milliSinceEpoch - 1000L*3L; ---- + -* Add three days to a datetime +* Add three days to a complex datetime + [source,Painless] ---- @@ -209,7 +209,7 @@ ZonedDateTime zdt = ZonedDateTime updatedZdt = zdt.plusDays(3); ---- + -* Subtract 125 minutes from a datetime +* Subtract 125 minutes from a complex datetime + [source,Painless] ---- @@ -218,7 +218,7 @@ ZonedDateTime zdt = ZonedDateTime updatedZdt = zdt.minusMinutes(125); ---- + -* Set the year on a datetime +* Set the year on a complex datetime + [source,Painless] ---- @@ -227,18 +227,20 @@ ZonedDateTime zdt = ZonedDateTime updatedZdt = zdt.withYear(1976); ---- -==== Elapsed Time +==== Datetime Difference (Elapsed Time) -Use either two longs or two ZonedDateTimes to calculate an elapsed -time (difference) between two datetimes. Use -<> to calculate an elapsed time -between two longs of the same time unit such as milliseconds. For more complex -datetimes. use <> to -calculate the difference between two ZonedDateTimes. +Use either two numeric datetimes or two complex datetimes to calculate the +difference (elapsed time) between two different datetimes. Use +<> to calculate the difference between +between two numeric datetimes of the same time unit such as milliseconds. For +complex datetimes there is often a method or another complex type +(<>) available to calculate the difference. Use +<> +to calculate the difference between two complex datetimes if supported. ===== Elapsed Time Examples -* Elapsed time for two millisecond datetimes +* Difference in milliseconds between two numeric datetimes + [source,Painless] ---- @@ -247,7 +249,7 @@ long endTimestamp = 434931330000L; long differenceInMillis = endTimestamp - startTimestamp; ---- + -* Elapsed time in milliseconds for two datetimes +* Difference in milliseconds between two complex datetimes + [source,Painless] ---- @@ -258,7 +260,7 @@ ZonedDateTime zdt2 = long differenceInMillis = ChronoUnit.MILLIS.between(zdt1, zdt2); ---- + -* Elapsed time in days for two datetimes +* Difference in days between two complex datetimes + [source,Painless] ---- @@ -271,15 +273,15 @@ long differenceInDays = ChronoUnit.DAYS.between(zdt1, zdt2); ==== Datetime Comparison -Use either two longs or two ZonedDateTimes to do a datetime comparison. Use -standard <> to compare two -longs of the same time unit such as milliseconds. For more complex datetimes, -use ZonedDateTime <> to -compare two ZonedDateTimes. +Use either two numeric datetimes or two complex datetimes to do a datetime +comparison. Use standard <> +to compare two numeric datetimes of the same time unit such as milliseconds. +For complex datetimes there is often a method or another complex type +(<>) available to do the comparison. 
===== Datetime Comparison Examples -* Comparison of two millisecond datetimes +* Greater than comparison of two numeric datetimes in milliseconds + [source,Painless] ---- @@ -291,7 +293,21 @@ if (timestamp1 > timestamp2) { } ---- + -* Before comparision of two datetimes +* Equality comparision of two complex datetimes ++ +[source,Painless] +---- +ZonedDateTime zdt1 = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); +ZonedDateTime zdt2 = + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z')); + +if (zdt1.equals(zdt2)) { + // handle condition +} +---- ++ +* Less than comparision of two complex datetimes + [source,Painless] ---- @@ -305,7 +321,7 @@ if (zdt1.isBefore(zdt2)) { } ---- + -* After comparision of two datetimes +* Greater than comparision of two complex datetimes + [source,Painless] ---- From ab5bc8359757aacdfcd16079c0847da8f82d5a6f Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Wed, 5 Jun 2019 19:50:04 +0200 Subject: [PATCH 078/210] Deprecation info for joda-java migration on 7.x (#42659) Some clusters might have been already migrated to version 7 without being warned about the joda-java migration changes. Deprecation api on that version will give them guidance on what patterns need to be changed. relates. This change is using the same logic like in 6.8 that is: verifying the pattern is from the incompatible set ('y'-Y', 'C', 'Z' etc), not from predifined set, not prefixed with 8. AND was also created in 6.x. Mappings created in 7.x are considered migrated and should not generate warnings There is no pipeline check (present on 6.8) as it is impossible to verify when the pipeline was created, and therefore to make sure the format is depracated or not #42010 --- .../org/elasticsearch/common/joda/Joda.java | 174 ++++++++-------- .../common/joda/JodaDeprecationPatterns.java | 94 +++++++++ .../common/time/DateFormatter.java | 23 ++- .../common/time/DateFormatters.java | 162 +++++++-------- .../common/time/FormatNames.java | 128 ++++++++++++ .../joda/JavaJodaTimeDuellingTests.java | 44 ++++ .../org/elasticsearch/test/ESTestCase.java | 44 +++- .../deprecation/ClusterDeprecationChecks.java | 1 - .../xpack/deprecation/DeprecationChecks.java | 3 +- .../deprecation/IndexDeprecationChecks.java | 57 +++++- .../IndexDeprecationChecksTests.java | 191 ++++++++++++++++++ 11 files changed, 725 insertions(+), 196 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/common/joda/JodaDeprecationPatterns.java create mode 100644 server/src/main/java/org/elasticsearch/common/time/FormatNames.java diff --git a/server/src/main/java/org/elasticsearch/common/joda/Joda.java b/server/src/main/java/org/elasticsearch/common/joda/Joda.java index 45587f6bb3d..977e1ad56dc 100644 --- a/server/src/main/java/org/elasticsearch/common/joda/Joda.java +++ b/server/src/main/java/org/elasticsearch/common/joda/Joda.java @@ -23,6 +23,7 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; import org.joda.time.Chronology; import org.joda.time.DateTime; import org.joda.time.DateTimeField; @@ -65,173 +66,173 @@ public class Joda { } DateTimeFormatter formatter; - if ("basicDate".equals(input) || "basic_date".equals(input)) { + if (FormatNames.BASIC_DATE.matches(input)) { formatter = ISODateTimeFormat.basicDate(); - } else if ("basicDateTime".equals(input) || 
"basic_date_time".equals(input)) { + } else if (FormatNames.BASIC_DATE_TIME.matches(input) ) { formatter = ISODateTimeFormat.basicDateTime(); - } else if ("basicDateTimeNoMillis".equals(input) || "basic_date_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_DATE_TIME_NO_MILLIS.matches(input) ) { formatter = ISODateTimeFormat.basicDateTimeNoMillis(); - } else if ("basicOrdinalDate".equals(input) || "basic_ordinal_date".equals(input)) { + } else if (FormatNames.BASIC_ORDINAL_DATE.matches(input) ) { formatter = ISODateTimeFormat.basicOrdinalDate(); - } else if ("basicOrdinalDateTime".equals(input) || "basic_ordinal_date_time".equals(input)) { + } else if (FormatNames.BASIC_ORDINAL_DATE_TIME.matches(input) ) { formatter = ISODateTimeFormat.basicOrdinalDateTime(); - } else if ("basicOrdinalDateTimeNoMillis".equals(input) || "basic_ordinal_date_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_ORDINAL_DATE_TIME_NO_MILLIS.matches(input) ) { formatter = ISODateTimeFormat.basicOrdinalDateTimeNoMillis(); - } else if ("basicTime".equals(input) || "basic_time".equals(input)) { + } else if (FormatNames.BASIC_TIME.matches(input) ) { formatter = ISODateTimeFormat.basicTime(); - } else if ("basicTimeNoMillis".equals(input) || "basic_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_TIME_NO_MILLIS.matches(input) ) { formatter = ISODateTimeFormat.basicTimeNoMillis(); - } else if ("basicTTime".equals(input) || "basic_t_time".equals(input)) { + } else if (FormatNames.BASIC_T_TIME.matches(input) ) { formatter = ISODateTimeFormat.basicTTime(); - } else if ("basicTTimeNoMillis".equals(input) || "basic_t_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_T_TIME_NO_MILLIS.matches(input) ) { formatter = ISODateTimeFormat.basicTTimeNoMillis(); - } else if ("basicWeekDate".equals(input) || "basic_week_date".equals(input)) { + } else if (FormatNames.BASIC_WEEK_DATE.matches(input)) { formatter = ISODateTimeFormat.basicWeekDate(); - } else if ("basicWeekDateTime".equals(input) || "basic_week_date_time".equals(input)) { + } else if (FormatNames.BASIC_WEEK_DATE_TIME.matches(input) ) { formatter = ISODateTimeFormat.basicWeekDateTime(); - } else if ("basicWeekDateTimeNoMillis".equals(input) || "basic_week_date_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_WEEK_DATE_TIME_NO_MILLIS.matches(input)) { formatter = ISODateTimeFormat.basicWeekDateTimeNoMillis(); - } else if ("date".equals(input)) { + } else if (FormatNames.DATE.matches(input)) { formatter = ISODateTimeFormat.date(); - } else if ("dateHour".equals(input) || "date_hour".equals(input)) { + } else if (FormatNames.DATE_HOUR.matches(input)) { formatter = ISODateTimeFormat.dateHour(); - } else if ("dateHourMinute".equals(input) || "date_hour_minute".equals(input)) { + } else if (FormatNames.DATE_HOUR_MINUTE.matches(input)) { formatter = ISODateTimeFormat.dateHourMinute(); - } else if ("dateHourMinuteSecond".equals(input) || "date_hour_minute_second".equals(input)) { + } else if (FormatNames.DATE_HOUR_MINUTE_SECOND.matches(input)) { formatter = ISODateTimeFormat.dateHourMinuteSecond(); - } else if ("dateHourMinuteSecondFraction".equals(input) || "date_hour_minute_second_fraction".equals(input)) { + } else if (FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION.matches(input)) { formatter = ISODateTimeFormat.dateHourMinuteSecondFraction(); - } else if ("dateHourMinuteSecondMillis".equals(input) || "date_hour_minute_second_millis".equals(input)) { + } else if 
(FormatNames.DATE_HOUR_MINUTE_SECOND_MILLIS.matches(input) ) { formatter = ISODateTimeFormat.dateHourMinuteSecondMillis(); - } else if ("dateOptionalTime".equals(input) || "date_optional_time".equals(input)) { + } else if (FormatNames.DATE_OPTIONAL_TIME.matches(input)) { // in this case, we have a separate parser and printer since the dataOptionalTimeParser can't print // this sucks we should use the root local by default and not be dependent on the node return new JodaDateFormatter(input, ISODateTimeFormat.dateOptionalTimeParser().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC).withDefaultYear(1970), ISODateTimeFormat.dateTime().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC).withDefaultYear(1970)); - } else if ("dateTime".equals(input) || "date_time".equals(input)) { + } else if (FormatNames.DATE_TIME.matches(input)) { formatter = ISODateTimeFormat.dateTime(); - } else if ("dateTimeNoMillis".equals(input) || "date_time_no_millis".equals(input)) { + } else if (FormatNames.DATE_TIME_NO_MILLIS.matches(input) ) { formatter = ISODateTimeFormat.dateTimeNoMillis(); - } else if ("hour".equals(input)) { + } else if (FormatNames.HOUR.matches(input)) { formatter = ISODateTimeFormat.hour(); - } else if ("hourMinute".equals(input) || "hour_minute".equals(input)) { + } else if (FormatNames.HOUR_MINUTE.matches(input)) { formatter = ISODateTimeFormat.hourMinute(); - } else if ("hourMinuteSecond".equals(input) || "hour_minute_second".equals(input)) { + } else if (FormatNames.HOUR_MINUTE_SECOND.matches(input) ) { formatter = ISODateTimeFormat.hourMinuteSecond(); - } else if ("hourMinuteSecondFraction".equals(input) || "hour_minute_second_fraction".equals(input)) { + } else if (FormatNames.HOUR_MINUTE_SECOND_FRACTION.matches(input)) { formatter = ISODateTimeFormat.hourMinuteSecondFraction(); - } else if ("hourMinuteSecondMillis".equals(input) || "hour_minute_second_millis".equals(input)) { + } else if (FormatNames.HOUR_MINUTE_SECOND_MILLIS.matches(input)) { formatter = ISODateTimeFormat.hourMinuteSecondMillis(); - } else if ("ordinalDate".equals(input) || "ordinal_date".equals(input)) { + } else if (FormatNames.ORDINAL_DATE.matches(input)) { formatter = ISODateTimeFormat.ordinalDate(); - } else if ("ordinalDateTime".equals(input) || "ordinal_date_time".equals(input)) { + } else if (FormatNames.ORDINAL_DATE_TIME.matches(input)) { formatter = ISODateTimeFormat.ordinalDateTime(); - } else if ("ordinalDateTimeNoMillis".equals(input) || "ordinal_date_time_no_millis".equals(input)) { + } else if (FormatNames.ORDINAL_DATE_TIME_NO_MILLIS.matches(input) ) { formatter = ISODateTimeFormat.ordinalDateTimeNoMillis(); - } else if ("time".equals(input)) { + } else if (FormatNames.TIME.matches(input)) { formatter = ISODateTimeFormat.time(); - } else if ("timeNoMillis".equals(input) || "time_no_millis".equals(input)) { + } else if (FormatNames.TIME_NO_MILLIS.matches(input)) { formatter = ISODateTimeFormat.timeNoMillis(); - } else if ("tTime".equals(input) || "t_time".equals(input)) { + } else if (FormatNames.T_TIME.matches(input)) { formatter = ISODateTimeFormat.tTime(); - } else if ("tTimeNoMillis".equals(input) || "t_time_no_millis".equals(input)) { + } else if (FormatNames.T_TIME_NO_MILLIS.matches(input)) { formatter = ISODateTimeFormat.tTimeNoMillis(); - } else if ("weekDate".equals(input) || "week_date".equals(input)) { + } else if (FormatNames.WEEK_DATE.matches(input)) { formatter = ISODateTimeFormat.weekDate(); - } else if ("weekDateTime".equals(input) || "week_date_time".equals(input)) { + } else if 
(FormatNames.WEEK_DATE_TIME.matches(input)) { formatter = ISODateTimeFormat.weekDateTime(); - } else if ("weekDateTimeNoMillis".equals(input) || "week_date_time_no_millis".equals(input)) { + } else if (FormatNames.WEEK_DATE_TIME_NO_MILLIS.matches(input)) { formatter = ISODateTimeFormat.weekDateTimeNoMillis(); - } else if ("weekyear".equals(input) || "week_year".equals(input)) { + } else if (FormatNames.WEEK_YEAR.matches(input)) { formatter = ISODateTimeFormat.weekyear(); - } else if ("weekyearWeek".equals(input) || "weekyear_week".equals(input)) { + } else if (FormatNames.WEEK_YEAR_WEEK.matches(input)) { formatter = ISODateTimeFormat.weekyearWeek(); - } else if ("weekyearWeekDay".equals(input) || "weekyear_week_day".equals(input)) { + } else if (FormatNames.WEEKYEAR_WEEK_DAY.matches(input)) { formatter = ISODateTimeFormat.weekyearWeekDay(); - } else if ("year".equals(input)) { + } else if (FormatNames.YEAR.matches(input)) { formatter = ISODateTimeFormat.year(); - } else if ("yearMonth".equals(input) || "year_month".equals(input)) { + } else if (FormatNames.YEAR_MONTH.matches(input) ) { formatter = ISODateTimeFormat.yearMonth(); - } else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) { + } else if (FormatNames.YEAR_MONTH_DAY.matches(input)) { formatter = ISODateTimeFormat.yearMonthDay(); - } else if ("epoch_second".equals(input)) { + } else if (FormatNames.EPOCH_SECOND.matches(input)) { formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(false), new EpochTimeParser(false)).toFormatter(); - } else if ("epoch_millis".equals(input)) { + } else if (FormatNames.EPOCH_MILLIS.matches(input)) { formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(true), new EpochTimeParser(true)).toFormatter(); // strict date formats here, must be at least 4 digits for year and two for months and two for day - } else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) { + } else if (FormatNames.STRICT_BASIC_WEEK_DATE.matches(input) ) { formatter = StrictISODateTimeFormat.basicWeekDate(); - } else if ("strictBasicWeekDateTime".equals(input) || "strict_basic_week_date_time".equals(input)) { + } else if (FormatNames.STRICT_BASIC_WEEK_DATE_TIME.matches(input)) { formatter = StrictISODateTimeFormat.basicWeekDateTime(); - } else if ("strictBasicWeekDateTimeNoMillis".equals(input) || "strict_basic_week_date_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS.matches(input)) { formatter = StrictISODateTimeFormat.basicWeekDateTimeNoMillis(); - } else if ("strictDate".equals(input) || "strict_date".equals(input)) { + } else if (FormatNames.STRICT_DATE.matches(input)) { formatter = StrictISODateTimeFormat.date(); - } else if ("strictDateHour".equals(input) || "strict_date_hour".equals(input)) { + } else if (FormatNames.STRICT_DATE_HOUR.matches(input)) { formatter = StrictISODateTimeFormat.dateHour(); - } else if ("strictDateHourMinute".equals(input) || "strict_date_hour_minute".equals(input)) { + } else if (FormatNames.STRICT_DATE_HOUR_MINUTE.matches(input)) { formatter = StrictISODateTimeFormat.dateHourMinute(); - } else if ("strictDateHourMinuteSecond".equals(input) || "strict_date_hour_minute_second".equals(input)) { + } else if (FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND.matches(input)) { formatter = StrictISODateTimeFormat.dateHourMinuteSecond(); - } else if ("strictDateHourMinuteSecondFraction".equals(input) || "strict_date_hour_minute_second_fraction".equals(input)) { + } else if 
(FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION.matches(input)) { formatter = StrictISODateTimeFormat.dateHourMinuteSecondFraction(); - } else if ("strictDateHourMinuteSecondMillis".equals(input) || "strict_date_hour_minute_second_millis".equals(input)) { + } else if (FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS.matches(input)) { formatter = StrictISODateTimeFormat.dateHourMinuteSecondMillis(); - } else if ("strictDateOptionalTime".equals(input) || "strict_date_optional_time".equals(input)) { + } else if (FormatNames.STRICT_DATE_OPTIONAL_TIME.matches(input)) { // in this case, we have a separate parser and printer since the dataOptionalTimeParser can't print // this sucks we should use the root local by default and not be dependent on the node return new JodaDateFormatter(input, StrictISODateTimeFormat.dateOptionalTimeParser().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC) .withDefaultYear(1970), StrictISODateTimeFormat.dateTime().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC).withDefaultYear(1970)); - } else if ("strictDateTime".equals(input) || "strict_date_time".equals(input)) { + } else if (FormatNames.STRICT_DATE_TIME.matches(input)) { formatter = StrictISODateTimeFormat.dateTime(); - } else if ("strictDateTimeNoMillis".equals(input) || "strict_date_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_DATE_TIME_NO_MILLIS.matches(input)) { formatter = StrictISODateTimeFormat.dateTimeNoMillis(); - } else if ("strictHour".equals(input) || "strict_hour".equals(input)) { + } else if (FormatNames.STRICT_HOUR.matches(input)) { formatter = StrictISODateTimeFormat.hour(); - } else if ("strictHourMinute".equals(input) || "strict_hour_minute".equals(input)) { + } else if (FormatNames.STRICT_HOUR_MINUTE.matches(input)) { formatter = StrictISODateTimeFormat.hourMinute(); - } else if ("strictHourMinuteSecond".equals(input) || "strict_hour_minute_second".equals(input)) { + } else if (FormatNames.STRICT_HOUR_MINUTE_SECOND.matches(input)) { formatter = StrictISODateTimeFormat.hourMinuteSecond(); - } else if ("strictHourMinuteSecondFraction".equals(input) || "strict_hour_minute_second_fraction".equals(input)) { + } else if (FormatNames.STRICT_HOUR_MINUTE_SECOND_FRACTION.matches(input)) { formatter = StrictISODateTimeFormat.hourMinuteSecondFraction(); - } else if ("strictHourMinuteSecondMillis".equals(input) || "strict_hour_minute_second_millis".equals(input)) { + } else if (FormatNames.STRICT_HOUR_MINUTE_SECOND_MILLIS.matches(input)) { formatter = StrictISODateTimeFormat.hourMinuteSecondMillis(); - } else if ("strictOrdinalDate".equals(input) || "strict_ordinal_date".equals(input)) { + } else if (FormatNames.STRICT_ORDINAL_DATE.matches(input)) { formatter = StrictISODateTimeFormat.ordinalDate(); - } else if ("strictOrdinalDateTime".equals(input) || "strict_ordinal_date_time".equals(input)) { + } else if (FormatNames.STRICT_ORDINAL_DATE_TIME.matches(input)) { formatter = StrictISODateTimeFormat.ordinalDateTime(); - } else if ("strictOrdinalDateTimeNoMillis".equals(input) || "strict_ordinal_date_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_ORDINAL_DATE_TIME_NO_MILLIS.matches(input)) { formatter = StrictISODateTimeFormat.ordinalDateTimeNoMillis(); - } else if ("strictTime".equals(input) || "strict_time".equals(input)) { + } else if (FormatNames.STRICT_TIME.matches(input)) { formatter = StrictISODateTimeFormat.time(); - } else if ("strictTimeNoMillis".equals(input) || "strict_time_no_millis".equals(input)) { + } else if 
(FormatNames.STRICT_TIME_NO_MILLIS.matches(input)) { formatter = StrictISODateTimeFormat.timeNoMillis(); - } else if ("strictTTime".equals(input) || "strict_t_time".equals(input)) { + } else if (FormatNames.STRICT_T_TIME.matches(input) ) { formatter = StrictISODateTimeFormat.tTime(); - } else if ("strictTTimeNoMillis".equals(input) || "strict_t_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_T_TIME_NO_MILLIS.matches(input)) { formatter = StrictISODateTimeFormat.tTimeNoMillis(); - } else if ("strictWeekDate".equals(input) || "strict_week_date".equals(input)) { + } else if (FormatNames.STRICT_WEEK_DATE.matches(input)) { formatter = StrictISODateTimeFormat.weekDate(); - } else if ("strictWeekDateTime".equals(input) || "strict_week_date_time".equals(input)) { + } else if (FormatNames.STRICT_WEEK_DATE_TIME.matches(input)) { formatter = StrictISODateTimeFormat.weekDateTime(); - } else if ("strictWeekDateTimeNoMillis".equals(input) || "strict_week_date_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_WEEK_DATE_TIME_NO_MILLIS.matches(input)) { formatter = StrictISODateTimeFormat.weekDateTimeNoMillis(); - } else if ("strictWeekyear".equals(input) || "strict_weekyear".equals(input)) { + } else if (FormatNames.STRICT_WEEKYEAR.matches(input) ) { formatter = StrictISODateTimeFormat.weekyear(); - } else if ("strictWeekyearWeek".equals(input) || "strict_weekyear_week".equals(input)) { + } else if (FormatNames.STRICT_WEEKYEAR_WEEK.matches(input)) { formatter = StrictISODateTimeFormat.weekyearWeek(); - } else if ("strictWeekyearWeekDay".equals(input) || "strict_weekyear_week_day".equals(input)) { + } else if (FormatNames.STRICT_WEEKYEAR_WEEK_DAY.matches(input)) { formatter = StrictISODateTimeFormat.weekyearWeekDay(); - } else if ("strictYear".equals(input) || "strict_year".equals(input)) { + } else if (FormatNames.STRICT_YEAR.matches(input)) { formatter = StrictISODateTimeFormat.year(); - } else if ("strictYearMonth".equals(input) || "strict_year_month".equals(input)) { + } else if (FormatNames.STRICT_YEAR_MONTH.matches(input)) { formatter = StrictISODateTimeFormat.yearMonth(); - } else if ("strictYearMonthDay".equals(input) || "strict_year_month_day".equals(input)) { + } else if (FormatNames.STRICT_YEAR_MONTH_DAY.matches(input)) { formatter = StrictISODateTimeFormat.yearMonthDay(); } else if (Strings.hasLength(input) && input.contains("||")) { String[] formats = Strings.delimitedListToStringArray(input, "||"); @@ -267,18 +268,11 @@ public class Joda { return new JodaDateFormatter(input, formatter, formatter); } - private static void maybeLogJodaDeprecation(String input) { - if (input.contains("CC")) { - deprecationLogger.deprecatedAndMaybeLog("joda-century-of-era-format", - "Use of 'C' (century-of-era) is deprecated and will not be supported in the next major version of Elasticsearch."); - } - if (input.contains("YY")) { - deprecationLogger.deprecatedAndMaybeLog("joda-year-of-era-format", "Use of 'Y' (year-of-era) will change to 'y' in the" + - " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier."); - } - if (input.contains("xx")) { - deprecationLogger.deprecatedAndMaybeLog("joda-week-based-year-format","Use of 'x' (week-based-year) will change" + - " to 'Y' in the next major version of Elasticsearch. 
Prefix your date format with '8' to use the new specifier."); + private static void maybeLogJodaDeprecation(String format) { + if (JodaDeprecationPatterns.isDeprecatedPattern(format)) { + String suggestion = JodaDeprecationPatterns.formatSuggestion(format); + deprecationLogger.deprecatedAndMaybeLog("joda-pattern-deprecation", + suggestion + " " + JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS); } } diff --git a/server/src/main/java/org/elasticsearch/common/joda/JodaDeprecationPatterns.java b/server/src/main/java/org/elasticsearch/common/joda/JodaDeprecationPatterns.java new file mode 100644 index 00000000000..6bd121da500 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/joda/JodaDeprecationPatterns.java @@ -0,0 +1,94 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.joda; + +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; + +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +public class JodaDeprecationPatterns { + public static final String USE_NEW_FORMAT_SPECIFIERS = "Use new java.time date format specifiiers."; + private static Map JODA_PATTERNS_DEPRECATIONS = new LinkedHashMap<>(); + + static { + JODA_PATTERNS_DEPRECATIONS.put("Y", "'Y' year-of-era should be replaced with 'y'. Use 'Y' for week-based-year."); + JODA_PATTERNS_DEPRECATIONS.put("y", "'y' year should be replaced with 'u'. Use 'y' for year-of-era."); + JODA_PATTERNS_DEPRECATIONS.put("C", "'C' century of era is no longer supported."); + JODA_PATTERNS_DEPRECATIONS.put("x", "'x' weak-year should be replaced with 'Y'. Use 'x' for zone-offset."); + JODA_PATTERNS_DEPRECATIONS.put("Z", + "'Z' time zone offset/id fails when parsing 'Z' for Zulu timezone. Consider using 'X'."); + JODA_PATTERNS_DEPRECATIONS.put("z", + "'z' time zone text. Will print 'Z' for Zulu given UTC timezone."); + } + + /** + * Checks if date parsing pattern is deprecated. + * Deprecated here means: when it was not already prefixed with 8 (meaning already upgraded) + * and it is not a predefined pattern from FormatNames like basic_date_time_no_millis + * and it uses pattern characters which changed meaning from joda to java like Y becomes y. 
+ * @param pattern - a format to be checked + * @return true if format is deprecated, otherwise false + */ + public static boolean isDeprecatedPattern(String pattern) { + List patterns = DateFormatter.splitCombinedPatterns(pattern); + + for (String subPattern : patterns) { + boolean isDeprecated = subPattern.startsWith("8") == false && FormatNames.exist(subPattern) == false && + JODA_PATTERNS_DEPRECATIONS.keySet().stream() + .filter(s -> subPattern.contains(s)) + .findAny() + .isPresent(); + if (isDeprecated) { + return true; + } + } + return false; + } + + /** + * Formats deprecation message for suggestion field in a warning header. + * Joins all warnings in a one message. + * @param pattern - a pattern to be formatted + * @return a formatted deprecation message + */ + public static String formatSuggestion(String pattern) { + List patterns = DateFormatter.splitCombinedPatterns(pattern); + + Set warnings = new LinkedHashSet<>(); + for (String subPattern : patterns) { + if (isDeprecatedPattern(subPattern)) { + String suggestion = JODA_PATTERNS_DEPRECATIONS.entrySet().stream() + .filter(s -> subPattern.contains(s.getKey())) + .map(s -> s.getValue()) + .collect(Collectors.joining("; ")); + warnings.add(suggestion); + } + } + String combinedWarning = warnings.stream() + .collect(Collectors.joining("; ")); + return combinedWarning; + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java index bf7999067b0..3ed6d95efe5 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java @@ -31,6 +31,7 @@ import java.time.temporal.TemporalAccessor; import java.util.ArrayList; import java.util.List; import java.util.Locale; +import java.util.stream.Collectors; public interface DateFormatter { @@ -137,13 +138,10 @@ public interface DateFormatter { input = input.substring(1); } - List formatters = new ArrayList<>(); - for (String pattern : Strings.delimitedListToStringArray(input, "||")) { - if (Strings.hasLength(pattern) == false) { - throw new IllegalArgumentException("Cannot have empty element in multi date format pattern: " + input); - } - formatters.add(DateFormatters.forPattern(pattern)); - } + List patterns = splitCombinedPatterns(input); + List formatters = patterns.stream() + .map(DateFormatters::forPattern) + .collect(Collectors.toList()); if (formatters.size() == 1) { return formatters.get(0); @@ -151,4 +149,15 @@ public interface DateFormatter { return DateFormatters.merge(input, formatters); } + + static List splitCombinedPatterns(String input) { + List patterns = new ArrayList<>(); + for (String pattern : Strings.delimitedListToStringArray(input, "||")) { + if (Strings.hasLength(pattern) == false) { + throw new IllegalArgumentException("Cannot have empty element in multi date format pattern: " + input); + } + patterns.add(pattern); + } + return patterns; + } } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 330681e2624..c8aa15d9b92 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -1423,166 +1423,166 @@ public class DateFormatters { throw new IllegalArgumentException("No date pattern provided"); } - if ("iso8601".equals(input)) { + if 
(FormatNames.ISO8601.matches(input)) { return ISO_8601; - } else if ("basicDate".equals(input) || "basic_date".equals(input)) { + } else if (FormatNames.BASIC_DATE.matches(input) ) { return BASIC_DATE; - } else if ("basicDateTime".equals(input) || "basic_date_time".equals(input)) { + } else if (FormatNames.BASIC_DATE_TIME.matches(input) ) { return BASIC_DATE_TIME; - } else if ("basicDateTimeNoMillis".equals(input) || "basic_date_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_DATE_TIME_NO_MILLIS.matches(input) ) { return BASIC_DATE_TIME_NO_MILLIS; - } else if ("basicOrdinalDate".equals(input) || "basic_ordinal_date".equals(input)) { + } else if (FormatNames.BASIC_ORDINAL_DATE.matches(input) ) { return BASIC_ORDINAL_DATE; - } else if ("basicOrdinalDateTime".equals(input) || "basic_ordinal_date_time".equals(input)) { + } else if (FormatNames.BASIC_ORDINAL_DATE_TIME.matches(input) ) { return BASIC_ORDINAL_DATE_TIME; - } else if ("basicOrdinalDateTimeNoMillis".equals(input) || "basic_ordinal_date_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_ORDINAL_DATE_TIME_NO_MILLIS.matches(input) ) { return BASIC_ORDINAL_DATE_TIME_NO_MILLIS; - } else if ("basicTime".equals(input) || "basic_time".equals(input)) { + } else if (FormatNames.BASIC_TIME.matches(input) ) { return BASIC_TIME; - } else if ("basicTimeNoMillis".equals(input) || "basic_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_TIME_NO_MILLIS.matches(input) ) { return BASIC_TIME_NO_MILLIS; - } else if ("basicTTime".equals(input) || "basic_t_time".equals(input)) { + } else if (FormatNames.BASIC_T_TIME.matches(input) ) { return BASIC_T_TIME; - } else if ("basicTTimeNoMillis".equals(input) || "basic_t_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_T_TIME_NO_MILLIS.matches(input) ) { return BASIC_T_TIME_NO_MILLIS; - } else if ("basicWeekDate".equals(input) || "basic_week_date".equals(input)) { + } else if (FormatNames.BASIC_WEEK_DATE.matches(input) ) { return BASIC_WEEK_DATE; - } else if ("basicWeekDateTime".equals(input) || "basic_week_date_time".equals(input)) { + } else if (FormatNames.BASIC_WEEK_DATE_TIME.matches(input) ) { return BASIC_WEEK_DATE_TIME; - } else if ("basicWeekDateTimeNoMillis".equals(input) || "basic_week_date_time_no_millis".equals(input)) { + } else if (FormatNames.BASIC_WEEK_DATE_TIME_NO_MILLIS.matches(input) ) { return BASIC_WEEK_DATE_TIME_NO_MILLIS; - } else if ("date".equals(input)) { + } else if (FormatNames.DATE.matches(input)) { return DATE; - } else if ("dateHour".equals(input) || "date_hour".equals(input)) { + } else if (FormatNames.DATE_HOUR.matches(input) ) { return DATE_HOUR; - } else if ("dateHourMinute".equals(input) || "date_hour_minute".equals(input)) { + } else if (FormatNames.DATE_HOUR_MINUTE.matches(input) ) { return DATE_HOUR_MINUTE; - } else if ("dateHourMinuteSecond".equals(input) || "date_hour_minute_second".equals(input)) { + } else if (FormatNames.DATE_HOUR_MINUTE_SECOND.matches(input) ) { return DATE_HOUR_MINUTE_SECOND; - } else if ("dateHourMinuteSecondFraction".equals(input) || "date_hour_minute_second_fraction".equals(input)) { + } else if (FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION.matches(input) ) { return DATE_HOUR_MINUTE_SECOND_FRACTION; - } else if ("dateHourMinuteSecondMillis".equals(input) || "date_hour_minute_second_millis".equals(input)) { + } else if (FormatNames.DATE_HOUR_MINUTE_SECOND_MILLIS.matches(input) ) { return DATE_HOUR_MINUTE_SECOND_MILLIS; - } else if ("dateOptionalTime".equals(input) || 
"date_optional_time".equals(input)) { + } else if (FormatNames.DATE_OPTIONAL_TIME.matches(input) ) { return DATE_OPTIONAL_TIME; - } else if ("dateTime".equals(input) || "date_time".equals(input)) { + } else if (FormatNames.DATE_TIME.matches(input) ) { return DATE_TIME; - } else if ("dateTimeNoMillis".equals(input) || "date_time_no_millis".equals(input)) { + } else if (FormatNames.DATE_TIME_NO_MILLIS.matches(input) ) { return DATE_TIME_NO_MILLIS; - } else if ("hour".equals(input)) { + } else if (FormatNames.HOUR.matches(input)) { return HOUR; - } else if ("hourMinute".equals(input) || "hour_minute".equals(input)) { + } else if (FormatNames.HOUR_MINUTE.matches(input) ) { return HOUR_MINUTE; - } else if ("hourMinuteSecond".equals(input) || "hour_minute_second".equals(input)) { + } else if (FormatNames.HOUR_MINUTE_SECOND.matches(input) ) { return HOUR_MINUTE_SECOND; - } else if ("hourMinuteSecondFraction".equals(input) || "hour_minute_second_fraction".equals(input)) { + } else if (FormatNames.HOUR_MINUTE_SECOND_FRACTION.matches(input) ) { return HOUR_MINUTE_SECOND_FRACTION; - } else if ("hourMinuteSecondMillis".equals(input) || "hour_minute_second_millis".equals(input)) { + } else if (FormatNames.HOUR_MINUTE_SECOND_MILLIS.matches(input) ) { return HOUR_MINUTE_SECOND_MILLIS; - } else if ("ordinalDate".equals(input) || "ordinal_date".equals(input)) { + } else if (FormatNames.ORDINAL_DATE.matches(input) ) { return ORDINAL_DATE; - } else if ("ordinalDateTime".equals(input) || "ordinal_date_time".equals(input)) { + } else if (FormatNames.ORDINAL_DATE_TIME.matches(input) ) { return ORDINAL_DATE_TIME; - } else if ("ordinalDateTimeNoMillis".equals(input) || "ordinal_date_time_no_millis".equals(input)) { + } else if (FormatNames.ORDINAL_DATE_TIME_NO_MILLIS.matches(input) ) { return ORDINAL_DATE_TIME_NO_MILLIS; - } else if ("time".equals(input)) { + } else if (FormatNames.TIME.matches(input)) { return TIME; - } else if ("timeNoMillis".equals(input) || "time_no_millis".equals(input)) { + } else if (FormatNames.TIME_NO_MILLIS.matches(input) ) { return TIME_NO_MILLIS; - } else if ("tTime".equals(input) || "t_time".equals(input)) { + } else if (FormatNames.T_TIME.matches(input) ) { return T_TIME; - } else if ("tTimeNoMillis".equals(input) || "t_time_no_millis".equals(input)) { + } else if (FormatNames.T_TIME_NO_MILLIS.matches(input) ) { return T_TIME_NO_MILLIS; - } else if ("weekDate".equals(input) || "week_date".equals(input)) { + } else if (FormatNames.WEEK_DATE.matches(input) ) { return WEEK_DATE; - } else if ("weekDateTime".equals(input) || "week_date_time".equals(input)) { + } else if (FormatNames.WEEK_DATE_TIME.matches(input) ) { return WEEK_DATE_TIME; - } else if ("weekDateTimeNoMillis".equals(input) || "week_date_time_no_millis".equals(input)) { + } else if (FormatNames.WEEK_DATE_TIME_NO_MILLIS.matches(input) ) { return WEEK_DATE_TIME_NO_MILLIS; - } else if ("weekyear".equals(input) || "week_year".equals(input)) { + } else if (FormatNames.WEEK_YEAR.matches(input) ) { return WEEK_YEAR; - } else if ("weekyearWeek".equals(input) || "weekyear_week".equals(input)) { + } else if (FormatNames.WEEK_YEAR_WEEK.matches(input) ) { return WEEKYEAR_WEEK; - } else if ("weekyearWeekDay".equals(input) || "weekyear_week_day".equals(input)) { + } else if (FormatNames.WEEKYEAR_WEEK_DAY.matches(input) ) { return WEEKYEAR_WEEK_DAY; - } else if ("year".equals(input)) { + } else if (FormatNames.YEAR.matches(input)) { return YEAR; - } else if ("yearMonth".equals(input) || "year_month".equals(input)) { + } else if 
(FormatNames.YEAR_MONTH.matches(input) ) { return YEAR_MONTH; - } else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) { + } else if (FormatNames.YEAR_MONTH_DAY.matches(input) ) { return YEAR_MONTH_DAY; - } else if ("epoch_second".equals(input)) { + } else if (FormatNames.EPOCH_SECOND.matches(input)) { return EpochTime.SECONDS_FORMATTER; - } else if ("epoch_millis".equals(input)) { + } else if (FormatNames.EPOCH_MILLIS.matches(input)) { return EpochTime.MILLIS_FORMATTER; - // strict date formats here, must be at least 4 digits for year and two for months and two for day - } else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) { + // strict date formats here, must be at least 4 digits for year and two for months and two for day + } else if (FormatNames.STRICT_BASIC_WEEK_DATE.matches(input) ) { return STRICT_BASIC_WEEK_DATE; - } else if ("strictBasicWeekDateTime".equals(input) || "strict_basic_week_date_time".equals(input)) { + } else if (FormatNames.STRICT_BASIC_WEEK_DATE_TIME.matches(input) ) { return STRICT_BASIC_WEEK_DATE_TIME; - } else if ("strictBasicWeekDateTimeNoMillis".equals(input) || "strict_basic_week_date_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS.matches(input) ) { return STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS; - } else if ("strictDate".equals(input) || "strict_date".equals(input)) { + } else if (FormatNames.STRICT_DATE.matches(input) ) { return STRICT_DATE; - } else if ("strictDateHour".equals(input) || "strict_date_hour".equals(input)) { + } else if (FormatNames.STRICT_DATE_HOUR.matches(input) ) { return STRICT_DATE_HOUR; - } else if ("strictDateHourMinute".equals(input) || "strict_date_hour_minute".equals(input)) { + } else if (FormatNames.STRICT_DATE_HOUR_MINUTE.matches(input) ) { return STRICT_DATE_HOUR_MINUTE; - } else if ("strictDateHourMinuteSecond".equals(input) || "strict_date_hour_minute_second".equals(input)) { + } else if (FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND.matches(input) ) { return STRICT_DATE_HOUR_MINUTE_SECOND; - } else if ("strictDateHourMinuteSecondFraction".equals(input) || "strict_date_hour_minute_second_fraction".equals(input)) { + } else if (FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION.matches(input) ) { return STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION; - } else if ("strictDateHourMinuteSecondMillis".equals(input) || "strict_date_hour_minute_second_millis".equals(input)) { + } else if (FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS.matches(input) ) { return STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS; - } else if ("strictDateOptionalTime".equals(input) || "strict_date_optional_time".equals(input)) { + } else if (FormatNames.STRICT_DATE_OPTIONAL_TIME.matches(input) ) { return STRICT_DATE_OPTIONAL_TIME; - } else if ("strictDateOptionalTimeNanos".equals(input) || "strict_date_optional_time_nanos".equals(input)) { + } else if (FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS.matches(input) ) { return STRICT_DATE_OPTIONAL_TIME_NANOS; - } else if ("strictDateTime".equals(input) || "strict_date_time".equals(input)) { + } else if (FormatNames.STRICT_DATE_TIME.matches(input) ) { return STRICT_DATE_TIME; - } else if ("strictDateTimeNoMillis".equals(input) || "strict_date_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_DATE_TIME_NO_MILLIS.matches(input) ) { return STRICT_DATE_TIME_NO_MILLIS; - } else if ("strictHour".equals(input) || "strict_hour".equals(input)) { + } else if (FormatNames.STRICT_HOUR.matches(input) ) { return STRICT_HOUR; - 
} else if ("strictHourMinute".equals(input) || "strict_hour_minute".equals(input)) { + } else if (FormatNames.STRICT_HOUR_MINUTE.matches(input) ) { return STRICT_HOUR_MINUTE; - } else if ("strictHourMinuteSecond".equals(input) || "strict_hour_minute_second".equals(input)) { + } else if (FormatNames.STRICT_HOUR_MINUTE_SECOND.matches(input) ) { return STRICT_HOUR_MINUTE_SECOND; - } else if ("strictHourMinuteSecondFraction".equals(input) || "strict_hour_minute_second_fraction".equals(input)) { + } else if (FormatNames.STRICT_HOUR_MINUTE_SECOND_FRACTION.matches(input) ) { return STRICT_HOUR_MINUTE_SECOND_FRACTION; - } else if ("strictHourMinuteSecondMillis".equals(input) || "strict_hour_minute_second_millis".equals(input)) { + } else if (FormatNames.STRICT_HOUR_MINUTE_SECOND_MILLIS.matches(input) ) { return STRICT_HOUR_MINUTE_SECOND_MILLIS; - } else if ("strictOrdinalDate".equals(input) || "strict_ordinal_date".equals(input)) { + } else if (FormatNames.STRICT_ORDINAL_DATE.matches(input) ) { return STRICT_ORDINAL_DATE; - } else if ("strictOrdinalDateTime".equals(input) || "strict_ordinal_date_time".equals(input)) { + } else if (FormatNames.STRICT_ORDINAL_DATE_TIME.matches(input) ) { return STRICT_ORDINAL_DATE_TIME; - } else if ("strictOrdinalDateTimeNoMillis".equals(input) || "strict_ordinal_date_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_ORDINAL_DATE_TIME_NO_MILLIS.matches(input) ) { return STRICT_ORDINAL_DATE_TIME_NO_MILLIS; - } else if ("strictTime".equals(input) || "strict_time".equals(input)) { + } else if (FormatNames.STRICT_TIME.matches(input) ) { return STRICT_TIME; - } else if ("strictTimeNoMillis".equals(input) || "strict_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_TIME_NO_MILLIS.matches(input) ) { return STRICT_TIME_NO_MILLIS; - } else if ("strictTTime".equals(input) || "strict_t_time".equals(input)) { + } else if (FormatNames.STRICT_T_TIME.matches(input) ) { return STRICT_T_TIME; - } else if ("strictTTimeNoMillis".equals(input) || "strict_t_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_T_TIME_NO_MILLIS.matches(input) ) { return STRICT_T_TIME_NO_MILLIS; - } else if ("strictWeekDate".equals(input) || "strict_week_date".equals(input)) { + } else if (FormatNames.STRICT_WEEK_DATE.matches(input) ) { return STRICT_WEEK_DATE; - } else if ("strictWeekDateTime".equals(input) || "strict_week_date_time".equals(input)) { + } else if (FormatNames.STRICT_WEEK_DATE_TIME.matches(input) ) { return STRICT_WEEK_DATE_TIME; - } else if ("strictWeekDateTimeNoMillis".equals(input) || "strict_week_date_time_no_millis".equals(input)) { + } else if (FormatNames.STRICT_WEEK_DATE_TIME_NO_MILLIS.matches(input) ) { return STRICT_WEEK_DATE_TIME_NO_MILLIS; - } else if ("strictWeekyear".equals(input) || "strict_weekyear".equals(input)) { + } else if (FormatNames.STRICT_WEEKYEAR.matches(input) ) { return STRICT_WEEKYEAR; - } else if ("strictWeekyearWeek".equals(input) || "strict_weekyear_week".equals(input)) { + } else if (FormatNames.STRICT_WEEKYEAR_WEEK.matches(input) ) { return STRICT_WEEKYEAR_WEEK; - } else if ("strictWeekyearWeekDay".equals(input) || "strict_weekyear_week_day".equals(input)) { + } else if (FormatNames.STRICT_WEEKYEAR_WEEK_DAY.matches(input) ) { return STRICT_WEEKYEAR_WEEK_DAY; - } else if ("strictYear".equals(input) || "strict_year".equals(input)) { + } else if (FormatNames.STRICT_YEAR.matches(input) ) { return STRICT_YEAR; - } else if ("strictYearMonth".equals(input) || "strict_year_month".equals(input)) { + } else if 
(FormatNames.STRICT_YEAR_MONTH.matches(input) ) { return STRICT_YEAR_MONTH; - } else if ("strictYearMonthDay".equals(input) || "strict_year_month_day".equals(input)) { + } else if (FormatNames.STRICT_YEAR_MONTH_DAY.matches(input) ) { return STRICT_YEAR_MONTH_DAY; } else { try { diff --git a/server/src/main/java/org/elasticsearch/common/time/FormatNames.java b/server/src/main/java/org/elasticsearch/common/time/FormatNames.java new file mode 100644 index 00000000000..bf4c6d18446 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/FormatNames.java @@ -0,0 +1,128 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.time; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public enum FormatNames { + ISO8601("iso8601", "iso8601"), + BASIC_DATE("basicDate", "basic_date"), + BASIC_DATE_TIME("basicDateTime", "basic_date_time"), + BASIC_DATE_TIME_NO_MILLIS("basicDateTimeNoMillis", "basic_date_time_no_millis"), + BASIC_ORDINAL_DATE("basicOrdinalDate", "basic_ordinal_date"), + BASIC_ORDINAL_DATE_TIME("basicOrdinalDateTime", "basic_ordinal_date_time"), + BASIC_ORDINAL_DATE_TIME_NO_MILLIS("basicOrdinalDateTimeNoMillis", "basic_ordinal_date_time_no_millis"), + BASIC_TIME("basicTime", "basic_time"), + BASIC_TIME_NO_MILLIS("basicTimeNoMillis", "basic_time_no_millis"), + BASIC_T_TIME("basicTTime", "basic_t_time"), + BASIC_T_TIME_NO_MILLIS("basicTTimeNoMillis", "basic_t_time_no_millis"), + BASIC_WEEK_DATE("basicWeekDate", "basic_week_date"), + BASIC_WEEK_DATE_TIME("basicWeekDateTime", "basic_week_date_time"), + BASIC_WEEK_DATE_TIME_NO_MILLIS("basicWeekDateTimeNoMillis", "basic_week_date_time_no_millis"), + DATE("date", "date"), + DATE_HOUR("dateHour", "date_hour"), + DATE_HOUR_MINUTE("dateHourMinute", "date_hour_minute"), + DATE_HOUR_MINUTE_SECOND("dateHourMinuteSecond", "date_hour_minute_second"), + DATE_HOUR_MINUTE_SECOND_FRACTION("dateHourMinuteSecondFraction", "date_hour_minute_second_fraction"), + DATE_HOUR_MINUTE_SECOND_MILLIS("dateHourMinuteSecondMillis", "date_hour_minute_second_millis"), + DATE_OPTIONAL_TIME("dateOptionalTime", "date_optional_time"), + DATE_TIME("dateTime", "date_time"), + DATE_TIME_NO_MILLIS("dateTimeNoMillis", "date_time_no_millis"), + HOUR("hour", "hour"), + HOUR_MINUTE("hourMinute", "hour_minute"), + HOUR_MINUTE_SECOND("hourMinuteSecond", "hour_minute_second"), + HOUR_MINUTE_SECOND_FRACTION("hourMinuteSecondFraction", "hour_minute_second_fraction"), + HOUR_MINUTE_SECOND_MILLIS("hourMinuteSecondMillis", "hour_minute_second_millis"), + ORDINAL_DATE("ordinalDate", "ordinal_date"), + ORDINAL_DATE_TIME("ordinalDateTime", "ordinal_date_time"), + ORDINAL_DATE_TIME_NO_MILLIS("ordinalDateTimeNoMillis", 
"ordinal_date_time_no_millis"), + TIME("time", "time"), + TIME_NO_MILLIS("timeNoMillis", "time_no_millis"), + T_TIME("tTime", "t_time"), + T_TIME_NO_MILLIS("tTimeNoMillis", "t_time_no_millis"), + WEEK_DATE("weekDate", "week_date"), + WEEK_DATE_TIME("weekDateTime", "week_date_time"), + WEEK_DATE_TIME_NO_MILLIS("weekDateTimeNoMillis", "week_date_time_no_millis"), + WEEK_YEAR("weekyear", "week_year"), + WEEK_YEAR_WEEK("weekyearWeek", "weekyear_week"), + WEEKYEAR_WEEK_DAY("weekyearWeekDay", "weekyear_week_day"), + YEAR("year", "year"), + YEAR_MONTH("yearMonth", "year_month"), + YEAR_MONTH_DAY("yearMonthDay", "year_month_day"), + EPOCH_SECOND("epoch_second", "epoch_second"), + EPOCH_MILLIS("epoch_millis", "epoch_millis"), + // strict date formats here, must be at least 4 digits for year and two for months and two for day" + STRICT_BASIC_WEEK_DATE("strictBasicWeekDate", "strict_basic_week_date"), + STRICT_BASIC_WEEK_DATE_TIME("strictBasicWeekDateTime", "strict_basic_week_date_time"), + STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS("strictBasicWeekDateTimeNoMillis", "strict_basic_week_date_time_no_millis"), + STRICT_DATE("strictDate", "strict_date"), + STRICT_DATE_HOUR("strictDateHour", "strict_date_hour"), + STRICT_DATE_HOUR_MINUTE("strictDateHourMinute", "strict_date_hour_minute"), + STRICT_DATE_HOUR_MINUTE_SECOND("strictDateHourMinuteSecond", "strict_date_hour_minute_second"), + STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION("strictDateHourMinuteSecondFraction", "strict_date_hour_minute_second_fraction"), + STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS("strictDateHourMinuteSecondMillis", "strict_date_hour_minute_second_millis"), + STRICT_DATE_OPTIONAL_TIME("strictDateOptionalTime", "strict_date_optional_time"), + STRICT_DATE_OPTIONAL_TIME_NANOS("strictDateOptionalTimeNanos", "strict_date_optional_time_nanos"), + STRICT_DATE_TIME("strictDateTime", "strict_date_time"), + STRICT_DATE_TIME_NO_MILLIS("strictDateTimeNoMillis", "strict_date_time_no_millis"), + STRICT_HOUR("strictHour", "strict_hour"), + STRICT_HOUR_MINUTE("strictHourMinute", "strict_hour_minute"), + STRICT_HOUR_MINUTE_SECOND("strictHourMinuteSecond", "strict_hour_minute_second"), + STRICT_HOUR_MINUTE_SECOND_FRACTION("strictHourMinuteSecondFraction", "strict_hour_minute_second_fraction"), + STRICT_HOUR_MINUTE_SECOND_MILLIS("strictHourMinuteSecondMillis", "strict_hour_minute_second_millis"), + STRICT_ORDINAL_DATE("strictOrdinalDate", "strict_ordinal_date"), + STRICT_ORDINAL_DATE_TIME("strictOrdinalDateTime", "strict_ordinal_date_time"), + STRICT_ORDINAL_DATE_TIME_NO_MILLIS("strictOrdinalDateTimeNoMillis", "strict_ordinal_date_time_no_millis"), + STRICT_TIME("strictTime", "strict_time"), + STRICT_TIME_NO_MILLIS("strictTimeNoMillis", "strict_time_no_millis"), + STRICT_T_TIME("strictTTime", "strict_t_time"), + STRICT_T_TIME_NO_MILLIS("strictTTimeNoMillis", "strict_t_time_no_millis"), + STRICT_WEEK_DATE("strictWeekDate", "strict_week_date"), + STRICT_WEEK_DATE_TIME("strictWeekDateTime", "strict_week_date_time"), + STRICT_WEEK_DATE_TIME_NO_MILLIS("strictWeekDateTimeNoMillis", "strict_week_date_time_no_millis"), + STRICT_WEEKYEAR("strictWeekyear", "strict_weekyear"), + STRICT_WEEKYEAR_WEEK("strictWeekyearWeek", "strict_weekyear_week"), + STRICT_WEEKYEAR_WEEK_DAY("strictWeekyearWeekDay", "strict_weekyear_week_day"), + STRICT_YEAR("strictYear", "strict_year"), + STRICT_YEAR_MONTH("strictYearMonth", "strict_year_month"), + STRICT_YEAR_MONTH_DAY("strictYearMonthDay", "strict_year_month_day"); + + private static final Set ALL_NAMES = Arrays.stream(values()) + 
.flatMap(n -> Stream.of(n.snakeCaseName, n.camelCaseName))
+        .collect(Collectors.toSet());
+    private final String camelCaseName;
+    private final String snakeCaseName;
+
+    FormatNames(String camelCaseName, String snakeCaseName) {
+        this.camelCaseName = camelCaseName;
+        this.snakeCaseName = snakeCaseName;
+    }
+
+    public static boolean exist(String format) {
+        return ALL_NAMES.contains(format);
+    }
+
+    public boolean matches(String format) {
+        return format.equals(camelCaseName) || format.equals(snakeCaseName);
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
index 061d83c9c38..16ff794979e 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
@@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.format.ISODateTimeFormat;
+import org.joda.time.format.DateTimeFormat;
 import java.time.ZoneOffset;
 import java.time.ZonedDateTime;
@@ -34,10 +35,53 @@ import java.time.temporal.TemporalAccessor;
 import java.util.Locale;
 import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 public class JavaJodaTimeDuellingTests extends ESTestCase {
+    public void testIncompatiblePatterns() {
+        // in joda 'y' means year, this is changed to 'u' in java.time. The difference shows up in before-era years.
+        assertSameMillis("-0001-01-01", "yyyy-MM-dd", "8uuuu-MM-dd");
+        assertSameMillis("-1", "y", "8u");
+
+        // year-of-era in joda becomes 'y' in java.time
+        assertSameMillis("2019-01-01", "YYYY-MM-dd", "8yyyy-MM-dd");
+
+
+        // in joda 'Z' was able to parse 'Z' zulu but in java it fails. You have to use 'X' to do that.
+ assertSameMillis("2019-01-01T01:01:01.001Z", "YYYY-MM-dd'T'HH:mm:ss.SSSZ", "8yyyy-MM-dd'T'HH:mm:ss.SSSX"); + assertSameMillis("2019-01-01T01:01:01.001+0000", "YYYY-MM-dd'T'HH:mm:ss.SSSZ", "8yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + + + // 'z' zoneId in joda prints UTC whereas joda prints 'Z' for zulu + TemporalAccessor parse = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSz",Locale.getDefault()) + .parse("2019-01-01T01:01:01.001+00:00"); + String javaZoneId = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSz",Locale.getDefault()) + .format(parse); + + DateTime dateTime = DateTimeFormat.forPattern("YYYY-MM-dd'T'HH:mm:ss.SSSZ").withOffsetParsed() + .parseDateTime("2019-01-01T01:01:01.001+0000"); + String jodaZoneId = DateTimeFormat.forPattern("YYYY-MM-dd'T'HH:mm:ss.SSSz").print(dateTime); + assertThat(javaZoneId, equalTo("2019-01-01T01:01:01.001Z")); + assertThat(jodaZoneId, equalTo("2019-01-01T01:01:01.001UTC")); + } + + private void assertSameMillis(String input, String jodaFormat, String javaFormat) { + DateFormatter jodaFormatter = Joda.forPattern(jodaFormat); + DateFormatter javaFormatter = DateFormatter.forPattern(javaFormat); + + DateTime jodaDateTime = jodaFormatter.parseJoda(input); + + TemporalAccessor javaTimeAccessor = javaFormatter.parse(input); + ZonedDateTime zonedDateTime = DateFormatters.from(javaTimeAccessor); + + String msg = String.format(Locale.ROOT, "Input [%s] JodaFormat [%s] JavaFormat [%s] Joda [%s], Java [%s]", + input, jodaFormat, javaFormat, jodaDateTime, DateTimeFormatter.ISO_INSTANT.format(zonedDateTime.toInstant())); + + assertThat(msg, jodaDateTime.getMillis(), is(zonedDateTime.toInstant().toEpochMilli())); + } + public void testTimeZoneFormatting() { assertSameDate("2001-01-01T00:00:00Z", "date_time_no_millis"); // the following fail under java 8 but work under java 10, needs investigation diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 6b36f985c21..ff56921b339 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -65,6 +65,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.joda.JodaDeprecationPatterns; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; @@ -355,6 +356,10 @@ public abstract class ESTestCase extends LuceneTestCase { return true; } + protected boolean enableJodaDeprecationWarningsCheck() { + return false; + } + @After public final void after() throws Exception { checkStaticState(false); @@ -385,7 +390,13 @@ public abstract class ESTestCase extends LuceneTestCase { //appropriate test try { final List warnings = threadContext.getResponseHeaders().get("Warning"); - assertNull("unexpected warning headers", warnings); + if (warnings != null && enableJodaDeprecationWarningsCheck() == false) { + List filteredWarnings = filterJodaDeprecationWarnings(warnings); + assertThat( filteredWarnings, empty()); + + } else { + assertNull("unexpected warning headers", warnings); + } } finally { resetDeprecationLogger(false); } @@ -418,20 +429,35 @@ public abstract class ESTestCase extends LuceneTestCase { } try { final List 
actualWarnings = threadContext.getResponseHeaders().get("Warning"); - assertNotNull("no warnings, expected: " + Arrays.asList(expectedWarnings), actualWarnings); - final Set actualWarningValues = - actualWarnings.stream().map(DeprecationLogger::extractWarningValueFromWarningHeader).collect(Collectors.toSet()); - for (String msg : expectedWarnings) { - assertThat(actualWarningValues, hasItem(DeprecationLogger.escapeAndEncode(msg))); + if (actualWarnings != null && enableJodaDeprecationWarningsCheck() == false) { + List filteredWarnings = filterJodaDeprecationWarnings(actualWarnings); + assertWarnings(filteredWarnings, expectedWarnings); + } else { + assertWarnings(actualWarnings, expectedWarnings); } - assertEquals("Expected " + expectedWarnings.length + " warnings but found " + actualWarnings.size() + "\nExpected: " - + Arrays.asList(expectedWarnings) + "\nActual: " + actualWarnings, - expectedWarnings.length, actualWarnings.size()); } finally { resetDeprecationLogger(true); } } + private List filterJodaDeprecationWarnings(List actualWarnings) { + return actualWarnings.stream() + .filter(m -> m.contains(JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS) == false) + .collect(Collectors.toList()); + } + + private void assertWarnings(List actualWarnings, String[] expectedWarnings) { + assertNotNull("no warnings, expected: " + Arrays.asList(expectedWarnings), actualWarnings); + final Set actualWarningValues = + actualWarnings.stream().map(DeprecationLogger::extractWarningValueFromWarningHeader).collect(Collectors.toSet()); + for (String msg : expectedWarnings) { + assertThat(actualWarningValues, hasItem(DeprecationLogger.escapeAndEncode(msg))); + } + assertEquals("Expected " + expectedWarnings.length + " warnings but found " + actualWarnings.size() + "\nExpected: " + + Arrays.asList(expectedWarnings) + "\nActual: " + actualWarnings, + expectedWarnings.length, actualWarnings.size()); + } + /** * Reset the deprecation logger by removing the current thread context, and setting a new thread context if {@code setNewThreadContext} * is set to {@code true} and otherwise clearing the current thread context. 
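The duelling test added above pins down the incompatibility that motivates much of this change: java.time's 'Z' pattern letter cannot parse the literal 'Z' zulu designator, while 'X' can. The following is only a minimal, JDK-only sketch of that behaviour; the class and variable names are illustrative and are not part of the patch.

import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.Locale;

public class ZuluOffsetSketch {
    public static void main(String[] args) {
        String zulu = "2019-01-01T01:01:01.001Z";

        // 'X' prints and parses 'Z' for a zero offset, so the literal zulu designator is accepted.
        DateTimeFormatter withX = DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH:mm:ss.SSSX", Locale.ROOT);
        System.out.println(ZonedDateTime.parse(zulu, withX));   // 2019-01-01T01:01:01.001Z

        // 'Z' only accepts numeric offsets such as '+0000', so the same input fails to parse.
        DateTimeFormatter withZ = DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT);
        try {
            ZonedDateTime.parse(zulu, withZ);
        } catch (DateTimeParseException e) {
            System.out.println("pattern 'Z' rejected the zulu designator: " + e.getMessage());
        }
    }
}

Prefixing a format with '8', as in the assertions above, is what routes an Elasticsearch date pattern to these java.time semantics instead of the Joda ones.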
diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java index 57a474744b6..162a2a6bf93 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java @@ -58,7 +58,6 @@ public class ClusterDeprecationChecks { "Ingest pipelines " + pipelinesWithDeprecatedEcsConfig + " uses the [ecs] option which needs to be removed to work in 8.0"); } return null; - } static DeprecationIssue checkTemplatesWithTooManyFields(ClusterState state) { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index b63a828ecbb..194c412ffe2 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -48,7 +48,8 @@ public class DeprecationChecks { Collections.unmodifiableList(Arrays.asList( IndexDeprecationChecks::oldIndicesCheck, IndexDeprecationChecks::tooManyFieldsCheck, - IndexDeprecationChecks::chainedMultiFieldsCheck + IndexDeprecationChecks::chainedMultiFieldsCheck, + IndexDeprecationChecks::deprecatedDateTimeFormat )); static List> ML_SETTINGS_CHECKS = diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index 1e9876a87fe..ead1b665ba7 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -10,6 +10,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.joda.JodaDeprecationPatterns; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -21,8 +22,10 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; +import java.util.function.BiFunction; import java.util.function.Function; + /** * Index-specific deprecation checks */ @@ -42,11 +45,13 @@ public class IndexDeprecationChecks { * @param type the document type * @param parentMap the mapping to read properties from * @param predicate the predicate to check against for issues, issue is returned if predicate evaluates to true + * @param fieldFormatter a function that takes a type and mapping field entry and returns a formatted field representation * @return a list of issues found in fields */ @SuppressWarnings("unchecked") static List findInPropertiesRecursively(String type, Map parentMap, - Function, Boolean> predicate) { + Function, Boolean> predicate, + BiFunction, String> fieldFormatter) { List issues = new ArrayList<>(); Map properties = (Map) parentMap.get("properties"); if (properties == null) { @@ -55,7 +60,7 @@ public class IndexDeprecationChecks { for 
(Map.Entry entry : properties.entrySet()) { Map valueMap = (Map) entry.getValue(); if (predicate.apply(valueMap)) { - issues.add("[type: " + type + ", field: " + entry.getKey() + "]"); + issues.add("[" + fieldFormatter.apply(type, entry) + "]"); } Map values = (Map) valueMap.get("fields"); @@ -63,21 +68,31 @@ public class IndexDeprecationChecks { for (Map.Entry multifieldEntry : values.entrySet()) { Map multifieldValueMap = (Map) multifieldEntry.getValue(); if (predicate.apply(multifieldValueMap)) { - issues.add("[type: " + type + ", field: " + entry.getKey() + ", multifield: " + multifieldEntry.getKey() + "]"); + issues.add("[" + fieldFormatter.apply(type, entry) + ", multifield: " + multifieldEntry.getKey() + "]"); } if (multifieldValueMap.containsKey("properties")) { - issues.addAll(findInPropertiesRecursively(type, multifieldValueMap, predicate)); + issues.addAll(findInPropertiesRecursively(type, multifieldValueMap, predicate, fieldFormatter)); } } } if (valueMap.containsKey("properties")) { - issues.addAll(findInPropertiesRecursively(type, valueMap, predicate)); + issues.addAll(findInPropertiesRecursively(type, valueMap, predicate, fieldFormatter)); } } return issues; } + private static String formatDateField(String type, Map.Entry entry) { + Map value = (Map) entry.getValue(); + return "type: " + type + ", field: " + entry.getKey() +", format: "+ value.get("format") +", suggestion: " + + JodaDeprecationPatterns.formatSuggestion((String)value.get("format")); + } + + private static String formatField(String type, Map.Entry entry) { + return "type: " + type + ", field: " + entry.getKey(); + } + static DeprecationIssue oldIndicesCheck(IndexMetaData indexMetaData) { Version createdWith = indexMetaData.getCreationVersion(); if (createdWith.before(Version.V_7_0_0)) { @@ -86,7 +101,7 @@ public class IndexDeprecationChecks { "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + "breaking-changes-8.0.html", "This index was created using version: " + createdWith); - } + } return null; } @@ -115,10 +130,38 @@ public class IndexDeprecationChecks { return null; } + static DeprecationIssue deprecatedDateTimeFormat(IndexMetaData indexMetaData) { + Version createdWith = indexMetaData.getCreationVersion(); + if (createdWith.before(Version.V_7_0_0)) { + List fields = new ArrayList<>(); + + fieldLevelMappingIssue(indexMetaData, ((mappingMetaData, sourceAsMap) -> fields.addAll( + findInPropertiesRecursively(mappingMetaData.type(), sourceAsMap, + IndexDeprecationChecks::isDateFieldWithDeprecatedPattern, + IndexDeprecationChecks::formatDateField)))); + + if (fields.size() > 0) { + return new DeprecationIssue(DeprecationIssue.Level.WARNING, + "Date field format uses patterns which has changed meaning in 7.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/7.0/breaking-changes-7.0.html#breaking_70_java_time_changes", + "This index has date fields with deprecated formats: " + fields + ". 
" + + JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS); + } + } + return null; + } + + private static boolean isDateFieldWithDeprecatedPattern(Map property) { + return "date".equals(property.get("type")) && + property.containsKey("format") && + JodaDeprecationPatterns.isDeprecatedPattern((String) property.get("format")); + } + static DeprecationIssue chainedMultiFieldsCheck(IndexMetaData indexMetaData) { List issues = new ArrayList<>(); fieldLevelMappingIssue(indexMetaData, ((mappingMetaData, sourceAsMap) -> issues.addAll( - findInPropertiesRecursively(mappingMetaData.type(), sourceAsMap, IndexDeprecationChecks::containsChainedMultiFields)))); + findInPropertiesRecursively(mappingMetaData.type(), sourceAsMap, + IndexDeprecationChecks::containsChainedMultiFields, IndexDeprecationChecks::formatField)))); if (issues.size() > 0) { return new DeprecationIssue(DeprecationIssue.Level.WARNING, "Multi-fields within multi-fields", diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index a2634f0206a..34e85a89527 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.joda.JodaDeprecationPatterns; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexSettings; @@ -25,6 +26,7 @@ import java.util.concurrent.atomic.AtomicInteger; import static java.util.Collections.singletonList; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; +import static org.hamcrest.Matchers.hasItem; public class IndexDeprecationChecksTests extends ESTestCase { public void testOldIndicesCheck() { @@ -156,6 +158,195 @@ public class IndexDeprecationChecksTests extends ESTestCase { "The names of fields that contain chained multi-fields: [[type: _doc, field: invalid-field]]"); assertEquals(singletonList(expected), issues); } + public void testDefinedPatternsDoNotWarn() throws IOException { + String simpleMapping = "{\n" + + "\"properties\" : {\n" + + " \"date_time_field_Y\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"strictWeekyearWeek\"\n" + + " }\n" + + " }" + + "}"; + IndexMetaData simpleIndex = createV6Index(simpleMapping); + + DeprecationIssue issue = IndexDeprecationChecks.deprecatedDateTimeFormat(simpleIndex); + assertNull(issue); + } + + public void testMigratedPatterns() throws IOException { + String simpleMapping = "{\n" + + "\"properties\" : {\n" + + " \"date_time_field_Y\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"8MM-YYYY\"\n" + + " }\n" + + " }" + + "}"; + IndexMetaData simpleIndex = createV6Index(simpleMapping); + + DeprecationIssue issue = IndexDeprecationChecks.deprecatedDateTimeFormat(simpleIndex); + assertNull(issue); + } + + public void testMultipleWarningsOnCombinedPattern() throws IOException { + String simpleMapping = "{\n" + + "\"properties\" : {\n" + + " \"date_time_field_Y\" : 
{\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"dd-CC||MM-YYYY\"\n" + + " }\n" + + " }" + + "}"; + IndexMetaData simpleIndex = createV6Index(simpleMapping); + + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.WARNING, + "Date field format uses patterns which has changed meaning in 7.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/7.0/breaking-changes-7.0.html#breaking_70_java_time_changes", + "This index has date fields with deprecated formats: ["+ + "[type: _doc, field: date_time_field_Y, format: dd-CC||MM-YYYY, " + + "suggestion: 'C' century of era is no longer supported." + + "; "+ + "'Y' year-of-era should be replaced with 'y'. Use 'Y' for week-based-year.]"+ + "]. "+ JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(simpleIndex)); + assertThat(issues, hasItem(expected)); + } + + public void testDuplicateWarningsOnCombinedPattern() throws IOException { + String simpleMapping = "{\n" + + "\"properties\" : {\n" + + " \"date_time_field_Y\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"dd-YYYY||MM-YYYY\"\n" + + " }\n" + + " }" + + "}"; + IndexMetaData simpleIndex = createV6Index(simpleMapping); + + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.WARNING, + "Date field format uses patterns which has changed meaning in 7.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/7.0/breaking-changes-7.0.html#breaking_70_java_time_changes", + "This index has date fields with deprecated formats: ["+ + "[type: _doc, field: date_time_field_Y, format: dd-YYYY||MM-YYYY, " + + "suggestion: 'Y' year-of-era should be replaced with 'y'. Use 'Y' for week-based-year.]"+ + "]. "+ JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(simpleIndex)); + assertThat(issues, hasItem(expected)); + } + + public void testWarningsOnMixCustomAndDefinedPattern() throws IOException { + String simpleMapping = "{\n" + + "\"properties\" : {\n" + + " \"date_time_field_Y\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"strictWeekyearWeek||MM-YYYY\"\n" + + " }\n" + + " }" + + "}"; + IndexMetaData simpleIndex = createV6Index(simpleMapping); + + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.WARNING, + "Date field format uses patterns which has changed meaning in 7.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/7.0/breaking-changes-7.0.html#breaking_70_java_time_changes", + "This index has date fields with deprecated formats: ["+ + "[type: _doc, field: date_time_field_Y, format: strictWeekyearWeek||MM-YYYY, " + + "suggestion: 'Y' year-of-era should be replaced with 'y'. Use 'Y' for week-based-year.]"+ + "]. 
"+ JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(simpleIndex)); + assertThat(issues, hasItem(expected)); + } + + public void testJodaPatternDeprecations() throws IOException { + String simpleMapping = "{\n" + + "\"properties\" : {\n" + + " \"date_time_field_Y\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"MM-YYYY\"\n" + + " },\n" + + " \"date_time_field_C\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"CC\"\n" + + " },\n" + + " \"date_time_field_x\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"xx-MM\"\n" + + " },\n" + + " \"date_time_field_y\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"yy-MM\"\n" + + " },\n" + + " \"date_time_field_Z\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"HH:mmZ\"\n" + + " },\n" + + " \"date_time_field_z\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"HH:mmz\"\n" + + " }\n" + + " }" + + "}"; + + IndexMetaData simpleIndex = createV6Index(simpleMapping); + + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.WARNING, + "Date field format uses patterns which has changed meaning in 7.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/7.0/breaking-changes-7.0.html#breaking_70_java_time_changes", + "This index has date fields with deprecated formats: ["+ + "[type: _doc, field: date_time_field_Y, format: MM-YYYY, " + + "suggestion: 'Y' year-of-era should be replaced with 'y'. Use 'Y' for week-based-year.], "+ + "[type: _doc, field: date_time_field_C, format: CC, " + + "suggestion: 'C' century of era is no longer supported.], "+ + "[type: _doc, field: date_time_field_x, format: xx-MM, " + + "suggestion: 'x' weak-year should be replaced with 'Y'. Use 'x' for zone-offset.], "+ + "[type: _doc, field: date_time_field_y, format: yy-MM, " + + "suggestion: 'y' year should be replaced with 'u'. Use 'y' for year-of-era.], "+ + "[type: _doc, field: date_time_field_Z, format: HH:mmZ, " + + "suggestion: 'Z' time zone offset/id fails when parsing 'Z' for Zulu timezone. Consider using 'X'.], "+ + "[type: _doc, field: date_time_field_z, format: HH:mmz, " + + "suggestion: 'z' time zone text. Will print 'Z' for Zulu given UTC timezone." + + "]"+ + "]. "+ JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(simpleIndex)); + assertThat(issues, hasItem(expected)); + } + + public void testMultipleJodaPatternDeprecationInOneField() throws IOException { + String simpleMapping = "{\n" + + "\"properties\" : {\n" + + " \"date_time_field\" : {\n" + + " \"type\" : \"date\",\n" + + " \"format\" : \"Y-C-x-y\"\n" + + " }\n" + + " }" + + "}"; + + IndexMetaData simpleIndex = createV6Index(simpleMapping); + + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.WARNING, + "Date field format uses patterns which has changed meaning in 7.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/7.0/breaking-changes-7.0.html#breaking_70_java_time_changes", + "This index has date fields with deprecated formats: ["+ + "[type: _doc, field: date_time_field, format: Y-C-x-y, " + + "suggestion: 'Y' year-of-era should be replaced with 'y'. Use 'Y' for week-based-year.; " + + "'y' year should be replaced with 'u'. Use 'y' for year-of-era.; " + + "'C' century of era is no longer supported.; " + + "'x' weak-year should be replaced with 'Y'. Use 'x' for zone-offset." + + "]"+ + "]. 
"+ JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(simpleIndex)); + assertThat(issues, hasItem(expected)); + } + + public IndexMetaData createV6Index(String simpleMapping) throws IOException { + return IndexMetaData.builder(randomAlphaOfLengthBetween(5, 10)) + .settings(settings( + VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, + VersionUtils.getPreviousVersion(Version.V_7_0_0)))) + .numberOfShards(randomIntBetween(1, 100)) + .numberOfReplicas(randomIntBetween(1, 100)) + .putMapping("_doc", simpleMapping) + .build(); + } static void addRandomFields(final int fieldLimit, XContentBuilder mappingBuilder) throws IOException { From 757c6a45a0494a62f44bf26fcd76be69b2692830 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 5 Jun 2019 12:29:40 -0700 Subject: [PATCH 079/210] [DOCS] Adds discovery.type (#42823) Co-Authored-By: David Turner --- .../discovery/discovery-settings.asciidoc | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/docs/reference/modules/discovery/discovery-settings.asciidoc b/docs/reference/modules/discovery/discovery-settings.asciidoc index 0c23e771089..bfd9cbd51fe 100644 --- a/docs/reference/modules/discovery/discovery-settings.asciidoc +++ b/docs/reference/modules/discovery/discovery-settings.asciidoc @@ -24,14 +24,21 @@ Discovery and cluster formation are affected by the following settings: name is deprecated but continues to work in order to preserve backwards compatibility. Support for the old name will be removed in a future version. + +`discovery.type`:: + + Specifies whether {es} should form a multiple-node cluster. By default, {es} + discovers other nodes when forming a cluster and allows other nodes to join + the cluster later. If `discovery.type` is set to `single-node`, {es} forms a + single-node cluster. For more information about when you might use this + setting, see <>. `cluster.initial_master_nodes`:: - Sets a list of the <> or transport addresses of the - initial set of master-eligible nodes in a brand-new cluster. By default - this list is empty, meaning that this node expects to join a cluster that - has already been bootstrapped. See <>. - + Sets the initial set of master-eligible nodes in a brand-new cluster. By + default this list is empty, meaning that this node expects to join a cluster + that has already been bootstrapped. See <>. + [float] ==== Expert settings From 1300183001f27f53c9b74b3f6a3227a586516c01 Mon Sep 17 00:00:00 2001 From: James Baiera Date: Wed, 5 Jun 2019 16:03:07 -0400 Subject: [PATCH 080/210] NullPointerException when creating a watch with Jira action (#41922) (#42081) (#42873) NullPointerException when secured_url does not use proper scheme in jira action. This commit will handle Expection and display proper message. 
--- .../xpack/watcher/common/http/Scheme.java | 2 ++ .../xpack/watcher/notification/jira/JiraAccount.java | 5 ++++- .../watcher/notification/jira/JiraAccountTests.java | 10 ++++++++++ 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/Scheme.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/Scheme.java index 04557271c26..15afb8885b5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/Scheme.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/Scheme.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.common.http; import java.util.Locale; +import java.util.Objects; public enum Scheme { @@ -29,6 +30,7 @@ public enum Scheme { } public static Scheme parse(String value) { + Objects.requireNonNull(value, "Scheme should not be Null"); value = value.toLowerCase(Locale.ROOT); switch (value) { case "http": diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccount.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccount.java index b539d007eee..d59f7e6f645 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccount.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccount.java @@ -61,6 +61,9 @@ public class JiraAccount { String url = getSetting(name, settings, SECURE_URL_SETTING); try { URI uri = new URI(url); + if (uri.getScheme() == null) { + throw new URISyntaxException("null", "No scheme defined in url"); + } Scheme protocol = Scheme.parse(uri.getScheme()); if ((protocol == Scheme.HTTP) && (Booleans.isTrue(settings.get(ALLOW_HTTP_SETTING)) == false)) { throw new SettingsException("invalid jira [" + name + "] account settings. unsecure scheme [" + protocol + "]"); @@ -68,7 +71,7 @@ public class JiraAccount { this.url = uri; } catch (URISyntaxException | IllegalArgumentException e) { throw new SettingsException( - "invalid jira [" + name + "] account settings. invalid [" + SECURE_URL_SETTING.getKey() + "] setting", e); + "invalid jira [" + name + "] account settings. invalid [" + SECURE_URL_SETTING.getKey() + "] setting", e); } this.user = getSetting(name, settings, SECURE_USER_SETTING); this.password = getSetting(name, settings, SECURE_PASSWORD_SETTING); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccountTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccountTests.java index 997a6aa6a8d..1b911172dc1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccountTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccountTests.java @@ -80,6 +80,16 @@ public class JiraAccountTests extends ESTestCase { assertThat(e.getMessage(), containsString("invalid jira [test] account settings. 
missing required [secure_password] setting")); } + public void testInvalidSchemeUrl() throws Exception{ + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(JiraAccount.SECURE_URL_SETTING.getKey(),"test"); //Setting test as invalid scheme url + secureSettings.setString(JiraAccount.SECURE_USER_SETTING.getKey(), "foo"); + secureSettings.setString(JiraAccount.SECURE_PASSWORD_SETTING.getKey(), "password"); + Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + SettingsException e = expectThrows(SettingsException.class, () -> new JiraAccount("test", settings, null)); + assertThat(e.getMessage(), containsString("invalid jira [test] account settings. invalid [secure_url] setting")); + } + public void testUnsecureAccountUrl() throws Exception { final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString(JiraAccount.SECURE_USER_SETTING.getKey(), "foo"); From 99542e66a647a5a62362eb28b631d223f7c5f179 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 5 Jun 2019 22:02:17 +0200 Subject: [PATCH 081/210] [Docs] Clarify caveats for phonetic filters replace option (#42807) The `replace` option in the phonetic token filter can have suprising side effects, e.g. such as described in #26921. This PR adds a note to be mindful about such scenarios and offers alternatives to using the `replace` option. Closes #26921 --- docs/plugins/analysis-phonetic.asciidoc | 8 ++++++++ docs/reference/query-dsl/match-query.asciidoc | 3 ++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/plugins/analysis-phonetic.asciidoc b/docs/plugins/analysis-phonetic.asciidoc index e22f819e1eb..3627751670a 100644 --- a/docs/plugins/analysis-phonetic.asciidoc +++ b/docs/plugins/analysis-phonetic.asciidoc @@ -65,6 +65,14 @@ GET phonetic_sample/_analyze <1> Returns: `J`, `joe`, `BLKS`, `bloggs` +It is important to note that `"replace": false` can lead to unexpected behavior since +the original and the phonetically analyzed version are both kept at the same token position. +Some queries handle these stacked tokens in special ways. For example, the fuzzy `match` +query does not apply {ref}/common-options.html#fuzziness[fuzziness] to stacked synonym tokens. +This can lead to issues that are difficult to diagnose and reason about. For this reason, it +is often beneficial to use separate fields for analysis with and without phonetic filtering. +That way searches can be run against both fields with differing boosts and trade-offs (e.g. +only run a fuzzy `match` query on the original text field, but not on the phonetic version). [float] ===== Double metaphone settings diff --git a/docs/reference/query-dsl/match-query.asciidoc b/docs/reference/query-dsl/match-query.asciidoc index 5e45d2b3212..23474811449 100644 --- a/docs/reference/query-dsl/match-query.asciidoc +++ b/docs/reference/query-dsl/match-query.asciidoc @@ -56,7 +56,8 @@ rewritten. Fuzzy transpositions (`ab` -> `ba`) are allowed by default but can be disabled by setting `fuzzy_transpositions` to `false`. -Note that fuzzy matching is not applied to terms with synonyms, as under the hood +NOTE: Fuzzy matching is not applied to terms with synonyms or in cases where the +analysis process produces multiple tokens at the same position. Under the hood these terms are expanded to a special synonym query that blends term frequencies, which does not support fuzzy expansion. 
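As a concrete illustration of the recommendation in this change (keep separate fields analyzed with and without the phonetic filter, and apply fuzziness only to the original text field), here is a hedged sketch using the Java `QueryBuilders` API. The `name` and `name.phonetic` field names are assumptions standing in for such a two-field mapping; they are not defined anywhere in this patch.

[source,java]
--------------------------------------------------
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class PhoneticQueryExample {
    public static BoolQueryBuilder buildQuery(String text) {
        return QueryBuilders.boolQuery()
            // Fuzzy matching only on the plain text field, where no phonetic
            // tokens are stacked at the same position, boosted for precision.
            .should(QueryBuilders.matchQuery("name", text)
                .fuzziness(Fuzziness.AUTO)
                .boost(2.0f))
            // Exact matching on the separately analyzed phonetic field for recall.
            .should(QueryBuilders.matchQuery("name.phonetic", text));
    }
}
--------------------------------------------------
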
From 12433434ef779421497908617eed6d2e8417fa61 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 5 Jun 2019 13:28:43 -0700 Subject: [PATCH 082/210] Skip installation of pre-bundled integ-test modules (#42900) (cherry picked from commit 67bedf6c7c5d6db1394c045958668ac930a92d57) --- .../testclusters/ElasticsearchNode.java | 36 ++++++++----------- 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 83a702f8a85..ef956a55b6b 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -23,7 +23,6 @@ import org.elasticsearch.gradle.Distribution; import org.elasticsearch.gradle.FileSupplier; import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.Version; -import org.gradle.api.file.DuplicatesStrategy; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; @@ -361,28 +360,23 @@ public class ElasticsearchNode implements TestClusterConfiguration { private void installModules() { if (distribution == Distribution.INTEG_TEST) { - modules.forEach(module -> services.copy(spec -> { - // ensure we don't override any existing JARs, since these are hardlinks other clusters might be using those files - spec.setDuplicatesStrategy(DuplicatesStrategy.EXCLUDE); + for (File module : modules) { + Path destination = workingDir.resolve("modules").resolve(module.getName().replace(".zip", "").replace("-" + version, "")); - if (module.getName().toLowerCase().endsWith(".zip")) { - spec.from(services.zipTree(module)); - } else if (module.isDirectory()) { - spec.from(module); - } else { - throw new IllegalArgumentException("Not a valid module " + module + " for " + this); + // only install modules that are not already bundled with the integ-test distribution + if (Files.exists(destination) == false) { + services.copy(spec -> { + if (module.getName().toLowerCase().endsWith(".zip")) { + spec.from(services.zipTree(module)); + } else if (module.isDirectory()) { + spec.from(module); + } else { + throw new IllegalArgumentException("Not a valid module " + module + " for " + this); + } + spec.into(destination); + }); } - spec.into( - workingDir - .resolve("modules") - .resolve( - module.getName() - .replace(".zip", "") - .replace("-" + version, "") - ) - .toFile() - ); - })); + } } else { LOGGER.info("Not installing " + modules.size() + "(s) since the " + distribution + " distribution already " + "has them"); From 1f4ff97d7dd7b272398b20ba90132685bc413c3c Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 5 Jun 2019 13:45:36 -0700 Subject: [PATCH 083/210] Mute failing test (cherry picked from commit 4952d4facf5949abdb9aae47dbe1ee18cf7eef99) --- .../org/elasticsearch/repositories/fs/FsRepositoryTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java index ec8a444d84f..5bb4d4f065e 100644 --- a/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java @@ -75,6 +75,7 @@ import static java.util.Collections.emptySet; public class FsRepositoryTests extends ESTestCase { + @AwaitsFix(bugUrl = 
"https://github.com/elastic/elasticsearch/issues/42905") public void testSnapshotAndRestore() throws IOException, InterruptedException { ThreadPool threadPool = new TestThreadPool(getClass().getSimpleName()); try (Directory directory = newDirectory()) { From 6eb4600e93ac1ee3ac027c3cdad9823b43048bf8 Mon Sep 17 00:00:00 2001 From: Gordon Brown Date: Wed, 5 Jun 2019 17:30:31 -0600 Subject: [PATCH 084/210] Add custom metadata to snapshots (#41281) Adds a metadata field to snapshots which can be used to store arbitrary key-value information. This may be useful for attaching a description of why a snapshot was taken, tagging snapshots to make categorization easier, or identifying the source of automatically-created snapshots. --- .../org/elasticsearch/client/SnapshotIT.java | 48 ++++++ docs/reference/modules/snapshots.asciidoc | 9 +- .../test/snapshot.get/10_basic.yml | 39 +++++ .../create/CreateSnapshotRequest.java | 53 ++++++- .../snapshots/get/GetSnapshotsResponse.java | 6 + .../cluster/SnapshotsInProgress.java | 30 +++- .../repositories/FilterRepository.java | 6 +- .../repositories/Repository.java | 4 +- .../blobstore/BlobStoreRepository.java | 5 +- .../elasticsearch/snapshots/SnapshotInfo.java | 71 +++++++-- .../snapshots/SnapshotsService.java | 11 +- .../create/CreateSnapshotRequestTests.java | 63 ++++++++ .../create/CreateSnapshotResponseTests.java | 15 +- .../get/GetSnapshotsResponseTests.java | 19 ++- .../cluster/ClusterStateDiffIT.java | 14 +- .../cluster/SnapshotsInProgressTests.java | 3 +- .../MetaDataDeleteIndexServiceTests.java | 4 +- .../MetaDataIndexStateServiceTests.java | 4 +- .../RepositoriesServiceTests.java | 3 +- .../SharedClusterSnapshotRestoreIT.java | 3 +- .../snapshots/SnapshotInfoTests.java | 149 ++++++++++++++++++ ...SnapshotsInProgressSerializationTests.java | 3 +- .../index/shard/RestoreOnlyRepository.java | 2 +- .../xpack/ccr/repository/CcrRepository.java | 3 +- 24 files changed, 518 insertions(+), 49 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/snapshots/SnapshotInfoTests.java diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 616850c513a..5c30de5c057 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -41,9 +41,13 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.RestoreInfo; +import org.elasticsearch.snapshots.SnapshotInfo; import java.io.IOException; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; @@ -139,6 +143,9 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { CreateSnapshotRequest request = new CreateSnapshotRequest(repository, snapshot); boolean waitForCompletion = randomBoolean(); request.waitForCompletion(waitForCompletion); + if (randomBoolean()) { + request.userMetadata(randomUserMetadata()); + } request.partial(randomBoolean()); request.includeGlobalState(randomBoolean()); @@ -167,6 +174,8 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { CreateSnapshotResponse putSnapshotResponse1 = createTestSnapshot(createSnapshotRequest1); CreateSnapshotRequest 
createSnapshotRequest2 = new CreateSnapshotRequest(repository, snapshot2); createSnapshotRequest2.waitForCompletion(true); + Map originalMetadata = randomUserMetadata(); + createSnapshotRequest2.userMetadata(originalMetadata); CreateSnapshotResponse putSnapshotResponse2 = createTestSnapshot(createSnapshotRequest2); // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. assertEquals(RestStatus.OK, putSnapshotResponse1.status()); @@ -186,6 +195,15 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { assertEquals(2, response.getSnapshots().size()); assertThat(response.getSnapshots().stream().map((s) -> s.snapshotId().getName()).collect(Collectors.toList()), contains("test_snapshot1", "test_snapshot2")); + Optional> returnedMetadata = response.getSnapshots().stream() + .filter(s -> s.snapshotId().getName().equals("test_snapshot2")) + .findFirst() + .map(SnapshotInfo::userMetadata); + if (returnedMetadata.isPresent()) { + assertEquals(originalMetadata, returnedMetadata.get()); + } else { + assertNull("retrieved metadata is null, expected non-null metadata", originalMetadata); + } } public void testSnapshotsStatus() throws IOException { @@ -231,6 +249,9 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(testRepository, testSnapshot); createSnapshotRequest.indices(testIndex); createSnapshotRequest.waitForCompletion(true); + if (randomBoolean()) { + createSnapshotRequest.userMetadata(randomUserMetadata()); + } CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest); assertEquals(RestStatus.OK, createSnapshotResponse.status()); @@ -261,6 +282,9 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot); createSnapshotRequest.waitForCompletion(true); + if (randomBoolean()) { + createSnapshotRequest.userMetadata(randomUserMetadata()); + } CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest); // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. 
assertEquals(RestStatus.OK, createSnapshotResponse.status()); @@ -270,4 +294,28 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { assertTrue(response.isAcknowledged()); } + + private static Map randomUserMetadata() { + if (randomBoolean()) { + return null; + } + + Map metadata = new HashMap<>(); + long fields = randomLongBetween(0, 4); + for (int i = 0; i < fields; i++) { + if (randomBoolean()) { + metadata.put(randomValueOtherThanMany(metadata::containsKey, () -> randomAlphaOfLengthBetween(2,10)), + randomAlphaOfLengthBetween(5, 5)); + } else { + Map nested = new HashMap<>(); + long nestedFields = randomLongBetween(0, 4); + for (int j = 0; j < nestedFields; j++) { + nested.put(randomValueOtherThanMany(nested::containsKey, () -> randomAlphaOfLengthBetween(2,10)), + randomAlphaOfLengthBetween(5, 5)); + } + metadata.put(randomValueOtherThanMany(metadata::containsKey, () -> randomAlphaOfLengthBetween(2,10)), nested); + } + } + return metadata; + } } diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc index d5d1e441660..9a33cdd7214 100644 --- a/docs/reference/modules/snapshots.asciidoc +++ b/docs/reference/modules/snapshots.asciidoc @@ -349,7 +349,11 @@ PUT /_snapshot/my_backup/snapshot_2?wait_for_completion=true { "indices": "index_1,index_2", "ignore_unavailable": true, - "include_global_state": false + "include_global_state": false, + "_meta": { + "taken_by": "kimchy", + "taken_because": "backup before upgrading" + } } ----------------------------------- // CONSOLE @@ -363,6 +367,9 @@ By setting `include_global_state` to false it's possible to prevent the cluster the snapshot. By default, the entire snapshot will fail if one or more indices participating in the snapshot don't have all primary shards available. This behaviour can be changed by setting `partial` to `true`. +The `_meta` field can be used to attach arbitrary metadata to the snapshot. This may be a record of who took the snapshot, +why it was taken, or any other data that might be useful. + Snapshot names can be automatically derived using <>, similarly as when creating new indices. Note that special characters need to be URI encoded. 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml index aa15ca34ff0..00656be2b59 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml @@ -87,6 +87,7 @@ setup: - is_false: snapshots.0.failures - is_false: snapshots.0.shards - is_false: snapshots.0.version + - is_false: snapshots.0._meta - do: snapshot.delete: @@ -152,3 +153,41 @@ setup: snapshot.delete: repository: test_repo_get_1 snapshot: test_snapshot_without_include_global_state + +--- +"Get snapshot info with metadata": + - skip: + version: " - 7.9.99" + reason: "https://github.com/elastic/elasticsearch/pull/41281 not yet backported to 7.x" + + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + snapshot.create: + repository: test_repo_get_1 + snapshot: test_snapshot_with_metadata + wait_for_completion: true + body: | + { "metadata": {"taken_by": "test", "foo": {"bar": "baz"}} } + + - do: + snapshot.get: + repository: test_repo_get_1 + snapshot: test_snapshot_with_metadata + + - is_true: snapshots + - match: { snapshots.0.snapshot: test_snapshot_with_metadata } + - match: { snapshots.0.state: SUCCESS } + - match: { snapshots.0.metadata.taken_by: test } + - match: { snapshots.0.metadata.foo.bar: baz } + + - do: + snapshot.delete: + repository: test_repo_get_1 + snapshot: test_snapshot_with_metadata diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 15fbac35bff..a72120e328b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -19,12 +19,14 @@ package org.elasticsearch.action.admin.cluster.snapshots.create; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -46,6 +48,7 @@ import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.snapshots.SnapshotInfo.METADATA_FIELD_INTRODUCED; /** * Create snapshot request @@ -63,6 +66,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBo */ public class CreateSnapshotRequest extends MasterNodeRequest implements IndicesRequest.Replaceable, ToXContentObject { + public static int MAXIMUM_METADATA_BYTES = 1024; // chosen arbitrarily private String snapshot; @@ -80,6 +84,8 
@@ public class CreateSnapshotRequest extends MasterNodeRequest userMetadata; + public CreateSnapshotRequest() { } @@ -104,6 +110,9 @@ public class CreateSnapshotRequest extends MasterNodeRequest MAXIMUM_METADATA_BYTES) { + validationException = addValidationError("metadata must be smaller than 1024 bytes, but was [" + metadataSize + "]", + validationException); + } return validationException; } + private static int metadataSize(Map userMetadata) { + if (userMetadata == null) { + return 0; + } + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + builder.value(userMetadata); + int size = BytesReference.bytes(builder).length(); + return size; + } catch (IOException e) { + // This should not be possible as we are just rendering the xcontent in memory + throw new ElasticsearchException(e); + } + } + /** * Sets the snapshot name * @@ -378,6 +409,15 @@ public class CreateSnapshotRequest extends MasterNodeRequest userMetadata() { + return userMetadata; + } + + public CreateSnapshotRequest userMetadata(Map userMetadata) { + this.userMetadata = userMetadata; + return this; + } + /** * Parses snapshot definition. * @@ -405,6 +445,11 @@ public class CreateSnapshotRequest extends MasterNodeRequest) entry.getValue()); } else if (name.equals("include_global_state")) { includeGlobalState = nodeBooleanValue(entry.getValue(), "include_global_state"); + } else if (name.equals("metadata")) { + if (entry.getValue() != null && (entry.getValue() instanceof Map == false)) { + throw new IllegalArgumentException("malformed metadata, should be an object"); + } + userMetadata((Map) entry.getValue()); } } indicesOptions(IndicesOptions.fromMap(source, indicesOptions)); @@ -433,6 +478,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest, ToXContent, private static final String TOTAL_SHARDS = "total_shards"; private static final String SUCCESSFUL_SHARDS = "successful_shards"; private static final String INCLUDE_GLOBAL_STATE = "include_global_state"; + private static final String USER_METADATA = "metadata"; private static final Version INCLUDE_GLOBAL_STATE_INTRODUCED = Version.V_6_2_0; @@ -90,6 +93,7 @@ public final class SnapshotInfo implements Comparable, ToXContent, private long endTime = 0L; private ShardStatsBuilder shardStatsBuilder = null; private Boolean includeGlobalState = null; + private Map userMetadata = null; private int version = -1; private List shardFailures = null; @@ -129,6 +133,10 @@ public final class SnapshotInfo implements Comparable, ToXContent, this.includeGlobalState = includeGlobalState; } + private void setUserMetadata(Map userMetadata) { + this.userMetadata = userMetadata; + } + private void setVersion(int version) { this.version = version; } @@ -155,7 +163,7 @@ public final class SnapshotInfo implements Comparable, ToXContent, } return new SnapshotInfo(snapshotId, indices, snapshotState, reason, version, startTime, endTime, - totalShards, successfulShards, shardFailures, includeGlobalState); + totalShards, successfulShards, shardFailures, includeGlobalState, userMetadata); } } @@ -196,6 +204,7 @@ public final class SnapshotInfo implements Comparable, ToXContent, SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setEndTime, new ParseField(END_TIME_IN_MILLIS)); SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setShardStatsBuilder, SHARD_STATS_PARSER, new ParseField(SHARDS)); SNAPSHOT_INFO_PARSER.declareBoolean(SnapshotInfoBuilder::setIncludeGlobalState, new ParseField(INCLUDE_GLOBAL_STATE)); + 
SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setUserMetadata, (p, c) -> p.map() , new ParseField(USER_METADATA)); SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID)); SNAPSHOT_INFO_PARSER.declareObjectArray(SnapshotInfoBuilder::setShardFailures, SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER, new ParseField(FAILURES)); @@ -225,6 +234,9 @@ public final class SnapshotInfo implements Comparable, ToXContent, @Nullable private Boolean includeGlobalState; + @Nullable + private final Map userMetadata; + @Nullable private final Version version; @@ -232,28 +244,30 @@ public final class SnapshotInfo implements Comparable, ToXContent, public SnapshotInfo(SnapshotId snapshotId, List indices, SnapshotState state) { this(snapshotId, indices, state, null, null, 0L, 0L, 0, 0, - Collections.emptyList(), null); + Collections.emptyList(), null, null); } public SnapshotInfo(SnapshotId snapshotId, List indices, SnapshotState state, Version version) { this(snapshotId, indices, state, null, version, 0L, 0L, 0, 0, - Collections.emptyList(), null); + Collections.emptyList(), null, null); } - public SnapshotInfo(SnapshotId snapshotId, List indices, long startTime, Boolean includeGlobalState) { + public SnapshotInfo(SnapshotId snapshotId, List indices, long startTime, Boolean includeGlobalState, + Map userMetadata) { this(snapshotId, indices, SnapshotState.IN_PROGRESS, null, Version.CURRENT, startTime, 0L, - 0, 0, Collections.emptyList(), includeGlobalState); + 0, 0, Collections.emptyList(), includeGlobalState, userMetadata); } public SnapshotInfo(SnapshotId snapshotId, List indices, long startTime, String reason, long endTime, - int totalShards, List shardFailures, Boolean includeGlobalState) { + int totalShards, List shardFailures, Boolean includeGlobalState, + Map userMetadata) { this(snapshotId, indices, snapshotState(reason, shardFailures), reason, Version.CURRENT, - startTime, endTime, totalShards, totalShards - shardFailures.size(), shardFailures, includeGlobalState); + startTime, endTime, totalShards, totalShards - shardFailures.size(), shardFailures, includeGlobalState, userMetadata); } private SnapshotInfo(SnapshotId snapshotId, List indices, SnapshotState state, String reason, Version version, long startTime, long endTime, int totalShards, int successfulShards, List shardFailures, - Boolean includeGlobalState) { + Boolean includeGlobalState, Map userMetadata) { this.snapshotId = Objects.requireNonNull(snapshotId); this.indices = Collections.unmodifiableList(Objects.requireNonNull(indices)); this.state = state; @@ -265,6 +279,7 @@ public final class SnapshotInfo implements Comparable, ToXContent, this.successfulShards = successfulShards; this.shardFailures = Objects.requireNonNull(shardFailures); this.includeGlobalState = includeGlobalState; + this.userMetadata = userMetadata; } /** @@ -298,6 +313,11 @@ public final class SnapshotInfo implements Comparable, ToXContent, if (in.getVersion().onOrAfter(INCLUDE_GLOBAL_STATE_INTRODUCED)) { includeGlobalState = in.readOptionalBoolean(); } + if (in.getVersion().onOrAfter(METADATA_FIELD_INTRODUCED)) { + userMetadata = in.readMap(); + } else { + userMetadata = null; + } } /** @@ -308,7 +328,7 @@ public final class SnapshotInfo implements Comparable, ToXContent, return new SnapshotInfo(snapshotId, Collections.emptyList(), SnapshotState.INCOMPATIBLE, "the snapshot is incompatible with the current version of Elasticsearch and its exact version is unknown", null, 0L, 0L, 0, 0, - Collections.emptyList(), null); + 
Collections.emptyList(), null, null); } /** @@ -432,6 +452,15 @@ public final class SnapshotInfo implements Comparable, ToXContent, return version; } + /** + * Returns the custom metadata that was attached to this snapshot at creation time. + * @return custom metadata + */ + @Nullable + public Map userMetadata() { + return userMetadata; + } + /** * Compares two snapshots by their start time; if the start times are the same, then * compares the two snapshots by their snapshot ids. @@ -496,6 +525,9 @@ public final class SnapshotInfo implements Comparable, ToXContent, if (includeGlobalState != null) { builder.field(INCLUDE_GLOBAL_STATE, includeGlobalState); } + if (userMetadata != null) { + builder.field(USER_METADATA, userMetadata); + } if (verbose || state != null) { builder.field(STATE, state); } @@ -547,6 +579,7 @@ public final class SnapshotInfo implements Comparable, ToXContent, if (includeGlobalState != null) { builder.field(INCLUDE_GLOBAL_STATE, includeGlobalState); } + builder.field(USER_METADATA, userMetadata); builder.field(START_TIME, startTime); builder.field(END_TIME, endTime); builder.field(TOTAL_SHARDS, totalShards); @@ -577,6 +610,7 @@ public final class SnapshotInfo implements Comparable, ToXContent, int totalShards = 0; int successfulShards = 0; Boolean includeGlobalState = null; + Map userMetadata = null; List shardFailures = Collections.emptyList(); if (parser.currentToken() == null) { // fresh parser? move to the first token parser.nextToken(); @@ -632,8 +666,12 @@ public final class SnapshotInfo implements Comparable, ToXContent, parser.skipChildren(); } } else if (token == XContentParser.Token.START_OBJECT) { - // It was probably created by newer version - ignoring - parser.skipChildren(); + if (USER_METADATA.equals(currentFieldName)) { + userMetadata = parser.map(); + } else { + // It was probably created by newer version - ignoring + parser.skipChildren(); + } } } } @@ -655,7 +693,8 @@ public final class SnapshotInfo implements Comparable, ToXContent, totalShards, successfulShards, shardFailures, - includeGlobalState); + includeGlobalState, + userMetadata); } @Override @@ -689,6 +728,9 @@ public final class SnapshotInfo implements Comparable, ToXContent, if (out.getVersion().onOrAfter(INCLUDE_GLOBAL_STATE_INTRODUCED)) { out.writeOptionalBoolean(includeGlobalState); } + if (out.getVersion().onOrAfter(METADATA_FIELD_INTRODUCED)) { + out.writeMap(userMetadata); + } } private static SnapshotState snapshotState(final String reason, final List shardFailures) { @@ -718,13 +760,14 @@ public final class SnapshotInfo implements Comparable, ToXContent, Objects.equals(indices, that.indices) && Objects.equals(includeGlobalState, that.includeGlobalState) && Objects.equals(version, that.version) && - Objects.equals(shardFailures, that.shardFailures); + Objects.equals(shardFailures, that.shardFailures) && + Objects.equals(userMetadata, that.userMetadata); } @Override public int hashCode() { return Objects.hash(snapshotId, state, reason, indices, startTime, endTime, - totalShards, successfulShards, includeGlobalState, version, shardFailures); + totalShards, successfulShards, includeGlobalState, version, shardFailures, userMetadata); } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index b1d365f7ff1..1563facd335 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ 
-287,7 +287,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus snapshotIndices, System.currentTimeMillis(), repositoryData.getGenId(), - null); + null, + request.userMetadata()); initializingSnapshots.add(newSnapshot.snapshot()); snapshots = new SnapshotsInProgress(newSnapshot); } else { @@ -557,7 +558,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus 0, Collections.emptyList(), snapshot.getRepositoryStateId(), - snapshot.includeGlobalState()); + snapshot.includeGlobalState(), + snapshot.userMetadata()); } catch (Exception inner) { inner.addSuppressed(exception); logger.warn(() -> new ParameterizedMessage("[{}] failed to close snapshot in repository", @@ -572,7 +574,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus private static SnapshotInfo inProgressSnapshot(SnapshotsInProgress.Entry entry) { return new SnapshotInfo(entry.snapshot().getSnapshotId(), entry.indices().stream().map(IndexId::getName).collect(Collectors.toList()), - entry.startTime(), entry.includeGlobalState()); + entry.startTime(), entry.includeGlobalState(), entry.userMetadata()); } /** @@ -988,7 +990,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus entry.shards().size(), unmodifiableList(shardFailures), entry.getRepositoryStateId(), - entry.includeGlobalState()); + entry.includeGlobalState(), + entry.userMetadata()); removeSnapshotFromClusterState(snapshot, snapshotInfo, null); logger.info("snapshot [{}] completed with state [{}]", snapshot, snapshotInfo.state()); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java index 0b598be6849..9f7bd5f6a01 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java @@ -19,10 +19,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.create; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions.Option; import org.elasticsearch.action.support.IndicesOptions.WildcardStates; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent.MapParams; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -41,6 +43,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.elasticsearch.snapshots.SnapshotInfoTests.randomUserMetadata; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + public class CreateSnapshotRequestTests extends ESTestCase { // tests creating XContent and parsing with source(Map) equivalency @@ -80,6 +86,10 @@ public class CreateSnapshotRequestTests extends ESTestCase { original.includeGlobalState(randomBoolean()); } + if (randomBoolean()) { + original.userMetadata(randomUserMetadata()); + } + if (randomBoolean()) { Collection wildcardStates = randomSubsetOf(Arrays.asList(WildcardStates.values())); Collection