diff --git a/build.gradle b/build.gradle index 3674e0a540b..0df5b97ae4a 100644 --- a/build.gradle +++ b/build.gradle @@ -87,8 +87,15 @@ subprojects { } } } + repositories { + maven { + name = 'localTest' + url = "${rootProject.buildDir}/local-test-repo" + } + } } } + plugins.withType(BuildPlugin).whenPluginAdded { project.licenseFile = project.rootProject.file('licenses/APACHE-LICENSE-2.0.txt') project.noticeFile = project.rootProject.file('NOTICE.txt') @@ -228,6 +235,7 @@ subprojects { "org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}": ':client:rest-high-level', "org.elasticsearch.client:test:${version}": ':client:test', "org.elasticsearch.client:transport:${version}": ':client:transport', + "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${version}": ':modules:lang-painless:spi', "org.elasticsearch.test:framework:${version}": ':test:framework', "org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:archives:integ-test-zip', "org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:archives:zip', diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 5775b2b6323..967c2e27ee8 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -162,11 +162,24 @@ if (project != rootProject) { // it's fine as we run them as part of :buildSrc test.enabled = false task integTest(type: Test) { + // integration test requires the local testing repo for example plugin builds + dependsOn project.rootProject.allprojects.collect { + it.tasks.matching { it.name == 'publishNebulaPublicationToLocalTestRepository'} + } exclude "**/*Tests.class" include "**/*IT.class" testClassesDirs = sourceSets.test.output.classesDirs classpath = sourceSets.test.runtimeClasspath inputs.dir(file("src/testKit")) + // tell BuildExamplePluginsIT where to find the example plugins + systemProperty ( + 'test.build-tools.plugin.examples', + files( + project(':example-plugins').subprojects.collect { it.projectDir } + ).asPath, + ) + systemProperty 'test.local-test-repo-path', "${rootProject.buildDir}/local-test-repo" + systemProperty 'test.lucene-snapshot-revision', (versions.lucene =~ /\w+-snapshot-([a-z0-9]+)/)[0][1] } check.dependsOn(integTest) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 306a2bcb58b..bf3ffcabe2f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -554,7 +554,7 @@ class BuildPlugin implements Plugin { project.publishing { publications { nebula(MavenPublication) { - artifact project.tasks.shadowJar + artifacts = [ project.tasks.shadowJar ] artifactId = project.archivesBaseName /* * Configure the pom to include the "shadow" as compile dependencies @@ -584,7 +584,6 @@ class BuildPlugin implements Plugin { } } } - } /** Adds compiler settings to the project */ @@ -799,6 +798,8 @@ class BuildPlugin implements Plugin { systemProperty 'tests.task', path systemProperty 'tests.security.manager', 'true' systemProperty 'jna.nosys', 'true' + // TODO: remove this deprecation compatibility setting for 7.0 + systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false' systemProperty 'compiler.java', project.ext.compilerJavaVersion.getMajorVersion() if (project.ext.inFipsJvm) { systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() + "FIPS" diff --git 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 00f178fda9c..6f42e41beaa 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -25,7 +25,6 @@ import org.elasticsearch.gradle.NoticeTask import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.InvalidUserDataException -import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.Task import org.gradle.api.XmlProvider @@ -39,7 +38,6 @@ import java.nio.file.Path import java.nio.file.StandardCopyOption import java.util.regex.Matcher import java.util.regex.Pattern - /** * Encapsulates build configuration for an Elasticsearch plugin. */ diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy index 6cfe44c8068..c250d7695a8 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy @@ -20,6 +20,7 @@ package org.elasticsearch.gradle.plugin import org.gradle.api.Project import org.gradle.api.tasks.Input +import org.gradle.api.tasks.InputFile /** * A container for plugin properties that will be written to the plugin descriptor, for easy @@ -55,18 +56,39 @@ class PluginPropertiesExtension { boolean requiresKeystore = false /** A license file that should be included in the built plugin zip. */ - @Input - File licenseFile = null + private File licenseFile = null /** * A notice file that should be included in the built plugin zip. This will be * extended with notices from the {@code licenses/} directory. */ - @Input - File noticeFile = null + private File noticeFile = null + + Project project = null PluginPropertiesExtension(Project project) { name = project.name version = project.version + this.project = project + } + + @InputFile + File getLicenseFile() { + return licenseFile + } + + void setLicenseFile(File licenseFile) { + project.ext.licenseFile = licenseFile + this.licenseFile = licenseFile + } + + @InputFile + File getNoticeFile() { + return noticeFile + } + + void setNoticeFile(File noticeFile) { + project.ext.noticeFile = noticeFile + this.noticeFile = noticeFile } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy index 8e913153f05..9588f77a71d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy @@ -23,7 +23,6 @@ import org.gradle.api.InvalidUserDataException import org.gradle.api.Task import org.gradle.api.tasks.Copy import org.gradle.api.tasks.OutputFile - /** * Creates a plugin descriptor. 
*/ diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 0dd56b86332..aaf4e468182 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -177,6 +177,12 @@ class NodeInfo { javaVersion = 8 } else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) { javaVersion = 9 + } else if (project.inFipsJvm && nodeVersion.onOrAfter("6.3.0") && nodeVersion.before("6.4.0")) { + /* + * Elasticsearch versions before 6.4.0 cannot be run in a FIPS-140 JVM. If we're running + * bwc tests in a FIPS-140 JVM, ensure that the pre v6.4.0 nodes use a Java 10 JVM instead. + */ + javaVersion = 10 } args.addAll("-E", "node.portsfile=true") diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index d2101c48aab..2838849981a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -31,6 +31,7 @@ import org.gradle.api.provider.Provider import org.gradle.api.tasks.Copy import org.gradle.api.tasks.Input import org.gradle.api.tasks.TaskState +import org.gradle.plugins.ide.idea.IdeaPlugin import java.nio.charset.StandardCharsets import java.nio.file.Files @@ -243,10 +244,12 @@ public class RestIntegTestTask extends DefaultTask { } } } - project.idea { - module { - if (scopes.TEST != null) { - scopes.TEST.plus.add(project.configurations.restSpec) + if (project.plugins.hasPlugin(IdeaPlugin)) { + project.idea { + module { + if (scopes.TEST != null) { + scopes.TEST.plus.add(project.configurations.restSpec) + } } } } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java new file mode 100644 index 00000000000..9b63d6f45e0 --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java @@ -0,0 +1,164 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gradle; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.commons.io.FileUtils; +import org.elasticsearch.gradle.test.GradleIntegrationTestCase; +import org.gradle.testkit.runner.GradleRunner; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.rules.TemporaryFolder; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +public class BuildExamplePluginsIT extends GradleIntegrationTestCase { + + private static List<File> EXAMPLE_PLUGINS = Collections.unmodifiableList( + Arrays.stream( + Objects.requireNonNull(System.getProperty("test.build-tools.plugin.examples")) + .split(File.pathSeparator) + ).map(File::new).collect(Collectors.toList()) + ); + + @Rule + public TemporaryFolder tmpDir = new TemporaryFolder(); + + public final File examplePlugin; + + public BuildExamplePluginsIT(File examplePlugin) { + this.examplePlugin = examplePlugin; + } + + @BeforeClass + public static void assertProjectsExist() { + assertEquals( + EXAMPLE_PLUGINS, + EXAMPLE_PLUGINS.stream().filter(File::exists).collect(Collectors.toList()) + ); + } + + @ParametersFactory + public static Iterable<Object[]> parameters() { + return EXAMPLE_PLUGINS + .stream() + .map(each -> new Object[] {each}) + .collect(Collectors.toList()); + } + + public void testCurrentExamplePlugin() throws IOException { + FileUtils.copyDirectory(examplePlugin, tmpDir.getRoot()); + // just get rid of deprecation warnings + Files.write( + getTempPath("settings.gradle"), + "enableFeaturePreview('STABLE_PUBLISHING')\n".getBytes(StandardCharsets.UTF_8) + ); + + adaptBuildScriptForTest(); + + Files.write( + tmpDir.newFile("NOTICE.txt").toPath(), + "dummy test notice".getBytes(StandardCharsets.UTF_8) + ); + + GradleRunner.create() + .withProjectDir(tmpDir.getRoot()) + .withArguments("clean", "check", "-s", "-i", "--warning-mode=all", "--scan") + .withPluginClasspath() + .build(); + } + + private void adaptBuildScriptForTest() throws IOException { + // Add the local repo as a build script URL so we can pull in build-tools and apply the plugin under test + // + is ok because we have no other repo and just want to pick up latest + writeBuildScript( + "buildscript {\n" + + " repositories {\n" + + " maven {\n" + + " url = '" + getLocalTestRepoPath() + "'\n" + + " }\n" + + " }\n" + + " dependencies {\n" + + " classpath \"org.elasticsearch.gradle:build-tools:+\"\n" + + " }\n" + + "}\n" + ); + // get the original file + Files.readAllLines(getTempPath("build.gradle"), StandardCharsets.UTF_8) + .stream() + .map(line -> line + "\n") + .forEach(this::writeBuildScript); + // Add a repositories section to be able to resolve dependencies + String luceneSnapshotRepo = ""; + String luceneSnapshotRevision = System.getProperty("test.lucene-snapshot-revision"); + if (luceneSnapshotRevision != null) { + luceneSnapshotRepo = " maven {\n" + + " url \"http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision + "\"\n" + + " }\n"; + } + writeBuildScript("\n" + + "repositories {\n" + + " maven {\n" + + " url \"" + getLocalTestRepoPath() + "\"\n" + + " }\n" + + luceneSnapshotRepo + + "}\n" + ); + Files.delete(getTempPath("build.gradle")); + Files.move(getTempPath("build.gradle.new"),
getTempPath("build.gradle")); + System.err.print("Generated build script is:"); + Files.readAllLines(getTempPath("build.gradle")).forEach(System.err::println); + } + + private Path getTempPath(String fileName) { + return new File(tmpDir.getRoot(), fileName).toPath(); + } + + private Path writeBuildScript(String script) { + try { + Path path = getTempPath("build.gradle.new"); + return Files.write( + path, + script.getBytes(StandardCharsets.UTF_8), + Files.exists(path) ? StandardOpenOption.APPEND : StandardOpenOption.CREATE_NEW + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private String getLocalTestRepoPath() { + String property = System.getProperty("test.local-test-repo-path"); + Objects.requireNonNull(property, "test.local-test-repo-path not passed to tests"); + File file = new File(property); + assertTrue("Expected " + property + " to exist, but it did not!", file.exists()); + return file.getAbsolutePath(); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java new file mode 100644 index 00000000000..e26a4c629a0 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.RequestConverters.EndpointBuilder; +import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; +import org.elasticsearch.protocol.xpack.ml.PutJobRequest; + +import java.io.IOException; + +import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; +import static org.elasticsearch.client.RequestConverters.createEntity; + +final class MLRequestConverters { + + private MLRequestConverters() {} + + static Request putJob(PutJobRequest putJobRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(putJobRequest.getJob().getId()) + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request openJob(OpenJobRequest openJobRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(openJobRequest.getJobId()) + .addPathPartAsIs("_open") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + request.setJsonEntity(openJobRequest.toString()); + return request; + } + + static Request deleteJob(DeleteJobRequest deleteJobRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(deleteJobRequest.getJobId()) + .build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + params.putParam("force", Boolean.toString(deleteJobRequest.isForce())); + + return request; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index a3e5ba72b77..32b6cd6cf2c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -19,6 +19,8 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobResponse; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; @@ -55,7 +57,7 @@ public final class MachineLearningClient { */ public PutJobResponse putJob(PutJobRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - RequestConverters::putMachineLearningJob, + MLRequestConverters::putJob, options, PutJobResponse::fromXContent, Collections.emptySet()); @@ -73,13 +75,51 @@ public final class MachineLearningClient { */ public void putJobAsync(PutJobRequest request, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(request, - RequestConverters::putMachineLearningJob, + 
MLRequestConverters::putJob, options, PutJobResponse::fromXContent, listener, Collections.emptySet()); } + /** + * Deletes the given Machine Learning Job + *

+ * For additional info + * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html">ML Delete Job documentation</a> + *

+ * @param request the request to delete the job + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return action acknowledgement + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::deleteJob, + options, + DeleteJobResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Deletes the given Machine Learning Job asynchronously and notifies the listener on completion + *

+ * For additional info + * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html">ML Delete Job documentation</a> + *

+ * @param request the request to delete the job + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::deleteJob, + options, + DeleteJobResponse::fromXContent, + listener, + Collections.emptySet()); + } + /** * Opens a Machine Learning Job. * When you open a new job, it starts with an empty model. @@ -98,7 +138,7 @@ public final class MachineLearningClient { */ public OpenJobResponse openJob(OpenJobRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - RequestConverters::machineLearningOpenJob, + MLRequestConverters::openJob, options, OpenJobResponse::fromXContent, Collections.emptySet()); @@ -120,7 +160,7 @@ public final class MachineLearningClient { */ public void openJobAsync(OpenJobRequest request, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(request, - RequestConverters::machineLearningOpenJob, + MLRequestConverters::openJob, options, OpenJobResponse::fromXContent, listener, diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 973c0ce126d..0e5fce5b227 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -112,8 +112,6 @@ import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; -import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; -import org.elasticsearch.protocol.xpack.ml.PutJobRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -1199,31 +1197,6 @@ final class RequestConverters { return request; } - static Request putMachineLearningJob(PutJobRequest putJobRequest) throws IOException { - String endpoint = new EndpointBuilder() - .addPathPartAsIs("_xpack") - .addPathPartAsIs("ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(putJobRequest.getJob().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request machineLearningOpenJob(OpenJobRequest openJobRequest) throws IOException { - String endpoint = new EndpointBuilder() - .addPathPartAsIs("_xpack") - .addPathPartAsIs("ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(openJobRequest.getJobId()) - .addPathPartAsIs("_open") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setJsonEntity(openJobRequest.toString()); - return request; - } - static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) { EndpointBuilder endpointBuilder = new EndpointBuilder() .addPathPartAsIs("_xpack/migration/assistance") 
@@ -1235,7 +1208,7 @@ final class RequestConverters { return request; } - private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { + static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java new file mode 100644 index 00000000000..43a41960e00 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; +import org.elasticsearch.protocol.xpack.ml.PutJobRequest; +import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; +import org.elasticsearch.protocol.xpack.ml.job.config.Detector; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.test.ESTestCase; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; + +public class MLRequestConvertersTests extends ESTestCase { + + public void testPutJob() throws IOException { + Job job = createValidJob("foo"); + PutJobRequest putJobRequest = new PutJobRequest(job); + + Request request = MLRequestConverters.putJob(putJobRequest); + + assertThat(request.getEndpoint(), equalTo("/_xpack/ml/anomaly_detectors/foo")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + Job parsedJob = Job.PARSER.apply(parser, null).build(); + assertThat(parsedJob, equalTo(job)); + } + } + + public void testOpenJob() throws Exception { + String jobId = "some-job-id"; + OpenJobRequest openJobRequest = new OpenJobRequest(jobId); + openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); + + Request request = MLRequestConverters.openJob(openJobRequest); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint()); + ByteArrayOutputStream 
bos = new ByteArrayOutputStream(); + request.getEntity().writeTo(bos); + assertEquals(bos.toString("UTF-8"), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}"); + } + + public void testDeleteJob() { + String jobId = randomAlphaOfLength(10); + DeleteJobRequest deleteJobRequest = new DeleteJobRequest(jobId); + + Request request = MLRequestConverters.deleteJob(deleteJobRequest); + assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint()); + assertEquals(Boolean.toString(false), request.getParameters().get("force")); + + deleteJobRequest.setForce(true); + request = MLRequestConverters.deleteJob(deleteJobRequest); + assertEquals(Boolean.toString(true), request.getParameters().get("force")); + } + + private static Job createValidJob(String jobId) { + AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList( + Detector.builder().setFunction("count").build())); + Job.Builder jobBuilder = Job.builder(jobId); + jobBuilder.setAnalysisConfig(analysisConfig); + return jobBuilder.build(); + } +} \ No newline at end of file diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 94e73a14c18..0037460150f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -20,6 +20,8 @@ package org.elasticsearch.client; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobResponse; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; @@ -48,6 +50,19 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { assertThat(createdJob.getJobType(), is(Job.ANOMALY_DETECTOR_JOB_TYPE)); } + public void testDeleteJob() throws Exception { + String jobId = randomValidJobId(); + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + DeleteJobResponse response = execute(new DeleteJobRequest(jobId), + machineLearningClient::deleteJob, + machineLearningClient::deleteJobAsync); + + assertTrue(response.isAcknowledged()); + } + public void testOpenJob() throws Exception { String jobId = randomValidJobId(); Job job = buildJob(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 1c9707e0e27..47195f0bb2a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -127,7 +127,6 @@ import org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RestRankEvalAction; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; -import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import 
org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.repositories.fs.FsRepository; @@ -2611,19 +2610,6 @@ public class RequestConvertersTests extends ESTestCase { assertThat(request.getEntity(), nullValue()); } - public void testPostMachineLearningOpenJob() throws Exception { - String jobId = "some-job-id"; - OpenJobRequest openJobRequest = new OpenJobRequest(jobId); - openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); - - Request request = RequestConverters.machineLearningOpenJob(openJobRequest); - assertEquals(HttpPost.METHOD_NAME, request.getMethod()); - assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint()); - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - request.getEntity().writeTo(bos); - assertEquals(bos.toString("UTF-8"), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}"); - } - /** * Randomize the {@link FetchSourceContext} request parameters. */ diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 50cd244c0fa..a77d8b43e57 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -25,6 +25,8 @@ import org.elasticsearch.client.MachineLearningIT; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobResponse; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; @@ -122,6 +124,56 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { } } + public void testDeleteJob() throws Exception { + RestHighLevelClient client = highLevelClient(); + + String jobId = "my-first-machine-learning-job"; + + Job job = MachineLearningIT.buildJob(jobId); + client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + Job secondJob = MachineLearningIT.buildJob("my-second-machine-learning-job"); + client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT); + + { + //tag::x-pack-delete-ml-job-request + DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job"); + deleteJobRequest.setForce(false); //<1> + DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT); + //end::x-pack-delete-ml-job-request + + //tag::x-pack-delete-ml-job-response + boolean isAcknowledged = deleteJobResponse.isAcknowledged(); //<1> + //end::x-pack-delete-ml-job-response + } + { + //tag::x-pack-delete-ml-job-request-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(DeleteJobResponse deleteJobResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::x-pack-delete-ml-job-request-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, 
latch); + + //tag::x-pack-delete-ml-job-request-async + DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-second-machine-learning-job"); + client.machineLearning().deleteJobAsync(deleteJobRequest, RequestOptions.DEFAULT, listener); // <1> + //end::x-pack-delete-ml-job-request-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testOpenJob() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -143,7 +195,6 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { //end::x-pack-ml-open-job-execute } - { //tag::x-pack-ml-open-job-listener ActionListener listener = new ActionListener() { @@ -154,7 +205,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { @Override public void onFailure(Exception e) { - //<2> + // <2> } }; //end::x-pack-ml-open-job-listener @@ -169,6 +220,5 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } - } } diff --git a/docs/build.gradle b/docs/build.gradle index 029147bba2f..8ee5c8a8e53 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -41,6 +41,9 @@ integTestCluster { // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults systemProperty 'es.scripting.use_java_time', 'false' systemProperty 'es.scripting.update.ctx_in_params', 'false' + + // TODO: remove this deprecation compatibility setting for 7.0 + systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false' } // remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed @@ -400,25 +403,25 @@ buildRestTests.setups['stored_scripted_metric_script'] = ''' - do: put_script: id: "my_init_script" - body: { "script": { "lang": "painless", "source": "params._agg.transactions = []" } } + body: { "script": { "lang": "painless", "source": "state.transactions = []" } } - match: { acknowledged: true } - do: put_script: id: "my_map_script" - body: { "script": { "lang": "painless", "source": "params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)" } } + body: { "script": { "lang": "painless", "source": "state.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)" } } - match: { acknowledged: true } - do: put_script: id: "my_combine_script" - body: { "script": { "lang": "painless", "source": "double profit = 0;for (t in params._agg.transactions) { profit += t; } return profit" } } + body: { "script": { "lang": "painless", "source": "double profit = 0;for (t in state.transactions) { profit += t; } return profit" } } - match: { acknowledged: true } - do: put_script: id: "my_reduce_script" - body: { "script": { "lang": "painless", "source": "double profit = 0;for (a in params._aggs) { profit += a; } return profit" } } + body: { "script": { "lang": "painless", "source": "double profit = 0;for (a in states) { profit += a; } return profit" } } - match: { acknowledged: true } ''' diff --git a/docs/java-rest/high-level/ml/delete-job.asciidoc b/docs/java-rest/high-level/ml/delete-job.asciidoc new file mode 100644 index 00000000000..44a6a479409 --- /dev/null +++ b/docs/java-rest/high-level/ml/delete-job.asciidoc @@ -0,0 +1,49 @@ +[[java-rest-high-x-pack-ml-delete-job]] +=== Delete Job API + +[[java-rest-high-x-pack-machine-learning-delete-job-request]] +==== Delete Job Request + +A `DeleteJobRequest` object requires a non-null `jobId` and can optionally set `force`. 
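+
+For illustration, a minimal sketch of building the request directly (the job id
+here is a placeholder):
+
+["source","java"]
+---------------------------------------------------
+DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-job-id"); // the job id must be non-null
+deleteJobRequest.setForce(false); // optional; defaults to false
+---------------------------------------------------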
+The request can then be executed as follows: + +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request] +--------------------------------------------------- +<1> Used to forcefully delete an opened job; +this method is quicker than closing and deleting the job. +Defaults to `false`. + +[[java-rest-high-x-pack-machine-learning-delete-job-response]] +==== Delete Job Response + +The returned `DeleteJobResponse` object indicates the acknowledgement of the request: +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-response] +--------------------------------------------------- +<1> `isAcknowledged` indicates whether the deletion request was acknowledged + +[[java-rest-high-x-pack-machine-learning-delete-job-async]] +==== Delete Job Asynchronously + +This request can also be made asynchronously. +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-async] +--------------------------------------------------- +<1> The `DeleteJobRequest` to execute and the `ActionListener` to alert on completion or error. + +The deletion request returns immediately. Once the request is completed, the `ActionListener` is +called back using either the `onResponse` or `onFailure` method. The latter indicates some failure occurred when +making the request. + +A typical listener for a `DeleteJobRequest` could be defined as follows: + +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-listener] +--------------------------------------------------- +<1> The action to be taken when the request is completed +<2> What to do when a failure occurs diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index a2db3436317..6bcb736243a 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -205,9 +205,11 @@ include::licensing/delete-license.asciidoc[] The Java High Level REST Client supports the following Machine Learning APIs: * <> +* <> * <> include::ml/put-job.asciidoc[] +include::ml/delete-job.asciidoc[] include::ml/open-job.asciidoc[] == Migration APIs diff --git a/docs/plugins/integrations.asciidoc b/docs/plugins/integrations.asciidoc index 90f2c685fda..8bffe5193ed 100644 --- a/docs/plugins/integrations.asciidoc +++ b/docs/plugins/integrations.asciidoc @@ -17,14 +17,11 @@ Integrations are not plugins, but are external tools or modules that make it eas * https://drupal.org/project/elasticsearch_connector[Drupal]: Drupal Elasticsearch integration. 
-* https://wordpress.org/plugins/wpsolr-search-engine/[WPSOLR]: - Elasticsearch (and Apache Solr) WordPress Plugin - -* http://searchbox-io.github.com/wp-elasticsearch/[Wp-Elasticsearch]: +* https://wordpress.org/plugins/elasticpress/[ElasticPress]: Elasticsearch WordPress Plugin -* https://github.com/wallmanderco/elasticsearch-indexer[Elasticsearch Indexer]: - Elasticsearch WordPress Plugin +* https://wordpress.org/plugins/wpsolr-search-engine/[WPSOLR]: + Elasticsearch (and Apache Solr) WordPress Plugin * https://doc.tiki.org/Elasticsearch[Tiki Wiki CMS Groupware]: Tiki has native support for Elasticsearch. This provides faster & better diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 7ba7e2da633..1cfc122bee4 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -47,7 +47,7 @@ POST test/_doc/1/_update // TEST[continued] We can add a tag to the list of tags (note, if the tag exists, it -will still add it, since its a list): +will still add it, since it's a list): [source,js] -------------------------------------------------- @@ -65,6 +65,28 @@ POST test/_doc/1/_update // CONSOLE // TEST[continued] +We can remove a tag from the list of tags. Note that the Painless function to +`remove` a tag takes as its parameter the array index of the element you wish +to remove, so you need a bit more logic to locate it while avoiding a runtime +error. Note that if the tag was present more than once in the list, this will +remove only one occurrence of it: + +[source,js] +-------------------------------------------------- +POST test/_doc/1/_update +{ + "script" : { + "source": "if (ctx._source.tags.contains(params.tag)) { ctx._source.tags.remove(ctx._source.tags.indexOf(params.tag)) }", + "lang": "painless", + "params" : { + "tag" : "blue" + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + In addition to `_source`, the following variables are available through the `ctx` map: `_index`, `_type`, `_id`, `_version`, `_routing` and `_now` (the current timestamp). @@ -172,7 +194,7 @@ the request was ignored. "_index": "test", "_type": "_doc", "_id": "1", - "_version": 6, + "_version": 7, "result": "noop" } -------------------------------------------------- diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index b89021e1cfe..3e44b2c41f6 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -93,7 +93,8 @@ Replication is important for two primary reasons: To summarize, each index can be split into multiple shards. An index can also be replicated zero (meaning no replicas) or more times. Once replicated, each index will have primary shards (the original shards that were replicated from) and replica shards (the copies of the primary shards). -The number of shards and replicas can be defined per index at the time the index is created. After the index is created, you may change the number of replicas dynamically anytime but you cannot change the number of shards after-the-fact. + +The number of shards and replicas can be defined per index at the time the index is created. After the index is created, you may also change the number of replicas dynamically anytime. 
You can change the number of shards for an existing index using the {ref}/indices-shrink-index.html[`_shrink`] and {ref}/indices-split-index.html[`_split`] APIs, however this is not a trivial task and pre-planning for the correct number of shards is the optimal approach. By default, each index in Elasticsearch is allocated one primary shard and one replica which means that if you have at least two nodes in your cluster, your index will have one primary shard and another replica shard (one complete replica) for a total of two shards per index. diff --git a/docs/reference/migration/migrate_7_0/search.asciidoc b/docs/reference/migration/migrate_7_0/search.asciidoc index 11f46509127..094294d8530 100644 --- a/docs/reference/migration/migrate_7_0/search.asciidoc +++ b/docs/reference/migration/migrate_7_0/search.asciidoc @@ -92,6 +92,9 @@ deprecated in 6.x, has been removed. Context enabled suggestion queries without contexts have to visit every suggestion, which degrades the search performance considerably. +For geo context the value of the `path` parameter is now validated against the mapping, +and the context is only accepted if `path` points to a field with `geo_point` type. + ==== Semantics changed for `max_concurrent_shard_requests` `max_concurrent_shard_requests` used to limit the total number of concurrent shard diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index a5a8e4d008a..d67d8a733ac 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -503,3 +503,31 @@ guide to the {painless}/index.html[Painless Scripting Language]. See the {painless}/painless-api-reference.html[Painless API Reference] in the guide to the {painless}/index.html[Painless Scripting Language]. + +[role="exclude", id="security-api-roles"] +=== Role management APIs + +You can use the following APIs to add, remove, and retrieve roles in the native realm: + +* <>, <> +* <> +* <> + +[role="exclude",id="security-api-tokens"] +=== Token management APIs + +You can use the following APIs to create and invalidate bearer tokens for access +without requiring basic authentication: + +* <>, <> + +[role="exclude",id="security-api-users"] +=== User Management APIs + +You can use the following APIs to create, read, update, and delete users from the +native realm: + +* <>, <> +* <>, <> +* <> +* <> diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc index 2a51d705d83..e7c9b593af3 100644 --- a/docs/reference/search/request-body.asciidoc +++ b/docs/reference/search/request-body.asciidoc @@ -90,7 +90,8 @@ And here is a sample response: Set to `false` to return an overall failure if the request would produce partial results. Defaults to true, which will allow partial results in the case of timeouts - or partial failures. + or partial failures. This default can be controlled using the cluster-level setting + `search.default_allow_partial_results`. `terminate_after`:: diff --git a/docs/reference/search/uri-request.asciidoc b/docs/reference/search/uri-request.asciidoc index a90f32bb3cd..279bc0c0384 100644 --- a/docs/reference/search/uri-request.asciidoc +++ b/docs/reference/search/uri-request.asciidoc @@ -125,5 +125,6 @@ more details on the different types of search that can be performed. |`allow_partial_search_results` |Set to `false` to return an overall failure if the request would produce partial results. Defaults to true, which will allow partial results in the case of timeouts -or partial failures.. 
+or partial failures. This default can be controlled using the cluster-level setting +`search.default_allow_partial_results`. |======================================================================= diff --git a/docs/reference/setup/important-settings/heap-dump-path.asciidoc b/docs/reference/setup/important-settings/heap-dump-path.asciidoc index b0d301b21d0..fb8c7ff35f0 100644 --- a/docs/reference/setup/important-settings/heap-dump-path.asciidoc +++ b/docs/reference/setup/important-settings/heap-dump-path.asciidoc @@ -8,8 +8,8 @@ distributions, and the `data` directory under the root of the Elasticsearch installation for the <> archive distributions). If this path is not suitable for receiving heap dumps, you should modify the entry `-XX:HeapDumpPath=...` in -<>. If you specify a fixed filename instead -of a directory, the JVM will repeatedly use the same file; this is one -mechanism for preventing heap dumps from accumulating in the heap dump -path. Alternatively, you can configure a scheduled task via your OS to -remove heap dumps that are older than a configured age. +<>. If you specify a directory, the JVM +will generate a filename for the heap dump based on the PID of the running +instance. If you specify a fixed filename instead of a directory, the file must +not exist when the JVM needs to perform a heap dump on an out of memory +exception, otherwise the heap dump will fail. diff --git a/docs/reference/setup/important-settings/network-host.asciidoc b/docs/reference/setup/important-settings/network-host.asciidoc index 7e29e73123d..1788bfebc66 100644 --- a/docs/reference/setup/important-settings/network-host.asciidoc +++ b/docs/reference/setup/important-settings/network-host.asciidoc @@ -9,7 +9,7 @@ location on a single node. This can be useful for testing Elasticsearch's ability to form clusters, but it is not a configuration recommended for production. -In order to communicate and to form a cluster with nodes on other servers, your +In order to form a cluster with nodes on other servers, your node will need to bind to a non-loopback address. While there are many <>, usually all you need to configure is `network.host`: diff --git a/libs/core/src/main/java/org/elasticsearch/common/CharArrays.java b/libs/core/src/main/java/org/elasticsearch/common/CharArrays.java new file mode 100644 index 00000000000..907874ca573 --- /dev/null +++ b/libs/core/src/main/java/org/elasticsearch/common/CharArrays.java @@ -0,0 +1,150 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common; + +import java.nio.ByteBuffer; +import java.nio.CharBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Objects; + +/** + * Helper class similar to Arrays to handle conversions for Char arrays + */ +public final class CharArrays { + + private CharArrays() {} + + /** + * Decodes the provided byte[] to a UTF-8 char[]. This is done while avoiding + * conversions to String. The provided byte[] is not modified by this method, so + * the caller needs to take care of clearing the value if it is sensitive. + */ + public static char[] utf8BytesToChars(byte[] utf8Bytes) { + final ByteBuffer byteBuffer = ByteBuffer.wrap(utf8Bytes); + final CharBuffer charBuffer = StandardCharsets.UTF_8.decode(byteBuffer); + final char[] chars; + if (charBuffer.hasArray()) { + // there is no guarantee that the char buffers backing array is the right size + // so we need to make a copy + chars = Arrays.copyOfRange(charBuffer.array(), charBuffer.position(), charBuffer.limit()); + Arrays.fill(charBuffer.array(), (char) 0); // clear sensitive data + } else { + final int length = charBuffer.limit() - charBuffer.position(); + chars = new char[length]; + charBuffer.get(chars); + // if the buffer is not read only we can reset and fill with 0's + if (charBuffer.isReadOnly() == false) { + charBuffer.clear(); // reset + for (int i = 0; i < charBuffer.limit(); i++) { + charBuffer.put((char) 0); + } + } + } + return chars; + } + + /** + * Encodes the provided char[] to a UTF-8 byte[]. This is done while avoiding + * conversions to String. The provided char[] is not modified by this method, so + * the caller needs to take care of clearing the value if it is sensitive. + */ + public static byte[] toUtf8Bytes(char[] chars) { + final CharBuffer charBuffer = CharBuffer.wrap(chars); + final ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(charBuffer); + final byte[] bytes; + if (byteBuffer.hasArray()) { + // there is no guarantee that the byte buffers backing array is the right size + // so we need to make a copy + bytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit()); + Arrays.fill(byteBuffer.array(), (byte) 0); // clear sensitive data + } else { + final int length = byteBuffer.limit() - byteBuffer.position(); + bytes = new byte[length]; + byteBuffer.get(bytes); + // if the buffer is not read only we can reset and fill with 0's + if (byteBuffer.isReadOnly() == false) { + byteBuffer.clear(); // reset + for (int i = 0; i < byteBuffer.limit(); i++) { + byteBuffer.put((byte) 0); + } + } + } + return bytes; + } + + /** + * Tests if a char[] contains a sequence of characters that match the prefix. This is like + * {@link String#startsWith(String)} but does not require conversion of the char[] to a string. + */ + public static boolean charsBeginsWith(String prefix, char[] chars) { + if (chars == null || prefix == null) { + return false; + } + + if (prefix.length() > chars.length) { + return false; + } + + for (int i = 0; i < prefix.length(); i++) { + if (chars[i] != prefix.charAt(i)) { + return false; + } + } + + return true; + } + + /** + * Constant time equality check of char arrays to avoid potential timing attacks. 
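+ * The loop below XORs each pair of characters and ORs the results into a single
+ * accumulator that is checked only once at the end, so for equal-length inputs the
+ * running time depends on the length alone, not on where the first mismatch occurs.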
+ */ + public static boolean constantTimeEquals(char[] a, char[] b) { + Objects.requireNonNull(a, "char arrays must not be null for constantTimeEquals"); + Objects.requireNonNull(b, "char arrays must not be null for constantTimeEquals"); + if (a.length != b.length) { + return false; + } + + int equals = 0; + for (int i = 0; i < a.length; i++) { + equals |= a[i] ^ b[i]; + } + + return equals == 0; + } + + /** + * Constant time equality check of strings to avoid potential timing attacks. + */ + public static boolean constantTimeEquals(String a, String b) { + Objects.requireNonNull(a, "strings must not be null for constantTimeEquals"); + Objects.requireNonNull(b, "strings must not be null for constantTimeEquals"); + if (a.length() != b.length()) { + return false; + } + + int equals = 0; + for (int i = 0; i < a.length(); i++) { + equals |= a.charAt(i) ^ b.charAt(i); + } + + return equals == 0; + } +} diff --git a/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java b/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java new file mode 100644 index 00000000000..9283283ab08 --- /dev/null +++ b/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common; + +import org.elasticsearch.test.ESTestCase; + +import java.nio.charset.StandardCharsets; + +public class CharArraysTests extends ESTestCase { + + public void testCharsToBytes() { + final String originalValue = randomUnicodeOfCodepointLengthBetween(0, 32); + final byte[] expectedBytes = originalValue.getBytes(StandardCharsets.UTF_8); + final char[] valueChars = originalValue.toCharArray(); + + final byte[] convertedBytes = CharArrays.toUtf8Bytes(valueChars); + assertArrayEquals(expectedBytes, convertedBytes); + } + + public void testBytesToUtf8Chars() { + final String originalValue = randomUnicodeOfCodepointLengthBetween(0, 32); + final byte[] bytes = originalValue.getBytes(StandardCharsets.UTF_8); + final char[] expectedChars = originalValue.toCharArray(); + + final char[] convertedChars = CharArrays.utf8BytesToChars(bytes); + assertArrayEquals(expectedChars, convertedChars); + } + + public void testCharsBeginsWith() { + assertFalse(CharArrays.charsBeginsWith(randomAlphaOfLength(4), null)); + assertFalse(CharArrays.charsBeginsWith(null, null)); + assertFalse(CharArrays.charsBeginsWith(null, randomAlphaOfLength(4).toCharArray())); + assertFalse(CharArrays.charsBeginsWith(randomAlphaOfLength(2), randomAlphaOfLengthBetween(3, 8).toCharArray())); + + final String prefix = randomAlphaOfLengthBetween(2, 4); + assertTrue(CharArrays.charsBeginsWith(prefix, prefix.toCharArray())); + final char[] prefixedValue = prefix.concat(randomAlphaOfLengthBetween(1, 12)).toCharArray(); + assertTrue(CharArrays.charsBeginsWith(prefix, prefixedValue)); + + final String modifiedPrefix = randomBoolean() ? prefix.substring(1) : prefix.substring(0, prefix.length() - 1); + char[] nonMatchingValue; + do { + nonMatchingValue = modifiedPrefix.concat(randomAlphaOfLengthBetween(0, 12)).toCharArray(); + } while (new String(nonMatchingValue).startsWith(prefix)); + assertFalse(CharArrays.charsBeginsWith(prefix, nonMatchingValue)); + assertTrue(CharArrays.charsBeginsWith(modifiedPrefix, nonMatchingValue)); + } + + public void testConstantTimeEquals() { + final String value = randomAlphaOfLengthBetween(0, 32); + assertTrue(CharArrays.constantTimeEquals(value, value)); + assertTrue(CharArrays.constantTimeEquals(value.toCharArray(), value.toCharArray())); + + final String other = randomAlphaOfLengthBetween(1, 32); + assertFalse(CharArrays.constantTimeEquals(value, other)); + assertFalse(CharArrays.constantTimeEquals(value.toCharArray(), other.toCharArray())); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java index 345db46f887..7de8353194d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java @@ -21,6 +21,7 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import java.lang.invoke.MethodType; import java.lang.reflect.Field; @@ -190,7 +191,7 @@ public class ScriptClassInfo { componentType = componentType.getComponentType(); } - if (painlessLookup.lookupPainlessClass(componentType) == null) { + if (componentType != def.class && painlessLookup.lookupPainlessClass(componentType) == null) { throw new IllegalArgumentException(unknownErrorMessageSource.apply(componentType)); 
} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 16b8ac14f14..55855a3cb1e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -26,6 +26,7 @@ import java.util.Objects; import java.util.Set; import java.util.function.Function; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_CLASS_NAME; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessConstructorKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessFieldKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; @@ -47,7 +48,7 @@ public final class PainlessLookup { public boolean isValidCanonicalClassName(String canonicalClassName) { Objects.requireNonNull(canonicalClassName); - return canonicalClassNamesToClasses.containsKey(canonicalClassName); + return DEF_CLASS_NAME.equals(canonicalClassName) || canonicalClassNamesToClasses.containsKey(canonicalClassName); } public Class canonicalTypeNameToType(String canonicalTypeName) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index e644453a4c1..c8353b54c9f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -211,9 +211,6 @@ public final class PainlessLookupBuilder { public PainlessLookupBuilder() { canonicalClassNamesToClasses = new HashMap<>(); classesToPainlessClassBuilders = new HashMap<>(); - - canonicalClassNamesToClasses.put(DEF_CLASS_NAME, def.class); - classesToPainlessClassBuilders.put(def.class, new PainlessClassBuilder()); } private Class canonicalTypeNameToType(String canonicalTypeName) { @@ -225,7 +222,7 @@ public final class PainlessLookupBuilder { type = type.getComponentType(); } - return classesToPainlessClassBuilders.containsKey(type); + return type == def.class || classesToPainlessClassBuilders.containsKey(type); } public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importClassName) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index f2eb4345169..71cacab9eba 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -82,7 +82,7 @@ public final class PainlessLookupUtility { Objects.requireNonNull(canonicalTypeName); Objects.requireNonNull(canonicalClassNamesToClasses); - Class type = canonicalClassNamesToClasses.get(canonicalTypeName); + Class type = DEF_CLASS_NAME.equals(canonicalTypeName) ? 
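The three painless lookup changes above all serve one idea: def is taken out of the class registry and special-cased at each entry point (name validation, type resolution, builder checks), so the dynamic type can no longer be shadowed or mutated through the whitelist maps. Distilled into a standalone sketch (names are illustrative, not the exact painless internals):

import java.util.Map;

final class def {} // stand-in for painless' lowercase marker class for dynamic values

static Class<?> resolveCanonicalType(String name, Map<String, Class<?>> registry) {
    if ("def".equals(name)) {
        return def.class;      // sentinel handled before the registry is consulted
    }
    return registry.get(name); // ordinary whitelisted classes stay map-driven
}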
def.class : canonicalClassNamesToClasses.get(canonicalTypeName); if (type != null) { return type; @@ -105,7 +105,7 @@ public final class PainlessLookupUtility { } canonicalTypeName = canonicalTypeName.substring(0, canonicalTypeName.indexOf('[')); - type = canonicalClassNamesToClasses.get(canonicalTypeName); + type = DEF_CLASS_NAME.equals(canonicalTypeName) ? def.class : canonicalClassNamesToClasses.get(canonicalTypeName); if (type != null) { char arrayBraces[] = new char[arrayDimensions]; diff --git a/plugins/examples/custom-settings/build.gradle b/plugins/examples/custom-settings/build.gradle index e0e728cec24..3caf29c8513 100644 --- a/plugins/examples/custom-settings/build.gradle +++ b/plugins/examples/custom-settings/build.gradle @@ -16,13 +16,14 @@ * specific language governing permissions and limitations * under the License. */ - apply plugin: 'elasticsearch.esplugin' esplugin { name 'custom-settings' description 'An example plugin showing how to register custom settings' classname 'org.elasticsearch.example.customsettings.ExampleCustomSettingsPlugin' + licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') + noticeFile rootProject.file('NOTICE.txt') } integTestCluster { diff --git a/plugins/examples/custom-suggester/build.gradle b/plugins/examples/custom-suggester/build.gradle index b36d5cd218d..977e467391d 100644 --- a/plugins/examples/custom-suggester/build.gradle +++ b/plugins/examples/custom-suggester/build.gradle @@ -23,6 +23,8 @@ esplugin { name 'custom-suggester' description 'An example plugin showing how to write and register a custom suggester' classname 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin' + licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') + noticeFile rootProject.file('NOTICE.txt') } integTestCluster { @@ -30,4 +32,4 @@ integTestCluster { } // this plugin has no unit tests, only rest tests -tasks.test.enabled = false \ No newline at end of file +tasks.test.enabled = false diff --git a/plugins/examples/painless-whitelist/build.gradle b/plugins/examples/painless-whitelist/build.gradle index ef1ca7d741e..cb2aeb82e9d 100644 --- a/plugins/examples/painless-whitelist/build.gradle +++ b/plugins/examples/painless-whitelist/build.gradle @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - apply plugin: 'elasticsearch.esplugin' esplugin { @@ -24,10 +23,12 @@ esplugin { description 'An example whitelisting additional classes and methods in painless' classname 'org.elasticsearch.example.painlesswhitelist.MyWhitelistPlugin' extendedPlugins = ['lang-painless'] + licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') + noticeFile rootProject.file('NOTICE.txt') } dependencies { - compileOnly project(':modules:lang-painless') + compileOnly "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${versions.elasticsearch}" } if (System.getProperty('tests.distribution') == null) { diff --git a/plugins/examples/rescore/build.gradle b/plugins/examples/rescore/build.gradle index 4adeb0c721b..cdecd760c81 100644 --- a/plugins/examples/rescore/build.gradle +++ b/plugins/examples/rescore/build.gradle @@ -16,11 +16,13 @@ * specific language governing permissions and limitations * under the License. 
*/ - apply plugin: 'elasticsearch.esplugin' esplugin { name 'example-rescore' description 'An example plugin implementing rescore and verifying that plugins *can* implement rescore' classname 'org.elasticsearch.example.rescore.ExampleRescorePlugin' + licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') + noticeFile rootProject.file('NOTICE.txt') } + diff --git a/plugins/examples/rest-handler/build.gradle b/plugins/examples/rest-handler/build.gradle index cfe84e6a45a..eff2fd1b6c6 100644 --- a/plugins/examples/rest-handler/build.gradle +++ b/plugins/examples/rest-handler/build.gradle @@ -16,13 +16,14 @@ * specific language governing permissions and limitations * under the License. */ - apply plugin: 'elasticsearch.esplugin' esplugin { name 'rest-handler' description 'An example plugin showing how to register a REST handler' classname 'org.elasticsearch.example.resthandler.ExampleRestHandlerPlugin' + licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') + noticeFile rootProject.file('NOTICE.txt') } // No unit tests in this example @@ -40,4 +41,4 @@ integTestCluster { } integTestRunner { systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }" -} +} \ No newline at end of file diff --git a/plugins/examples/script-expert-scoring/build.gradle b/plugins/examples/script-expert-scoring/build.gradle index 7c602d9bc02..e9da62acdcf 100644 --- a/plugins/examples/script-expert-scoring/build.gradle +++ b/plugins/examples/script-expert-scoring/build.gradle @@ -16,13 +16,15 @@ * specific language governing permissions and limitations * under the License. */ - apply plugin: 'elasticsearch.esplugin' esplugin { name 'script-expert-scoring' description 'An example script engine to use low level Lucene internals for expert scoring' classname 'org.elasticsearch.example.expertscript.ExpertScriptPlugin' + licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') + noticeFile rootProject.file('NOTICE.txt') } test.enabled = false + diff --git a/server/build.gradle b/server/build.gradle index 1964eddd03e..b22a93a702c 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -342,3 +342,15 @@ if (isEclipse == false || project.path == ":server-tests") { integTest.mustRunAfter test } +// TODO: remove these compatibility tests in 7.0 +additionalTest('testScriptedMetricAggParamsV6Compatibility') { + include '**/ScriptedMetricAggregatorAggStateV6CompatTests.class' + include '**/InternalScriptedMetricAggStateV6CompatTests.class' + systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'true' +} + +test { + // these are tested explicitly in separate test tasks + exclude '**/ScriptedMetricAggregatorAggStateV6CompatTests.class' + exclude '**/InternalScriptedMetricAggStateV6CompatTests.class' +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java index 50df7b1bb26..fb3e6ac71ad 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java @@ -19,142 +19,22 @@ package org.elasticsearch.action.admin.cluster.node.reload; - -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.nodes.BaseNodesRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import 
org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.SecureString; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.CharBuffer; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; - -import static org.elasticsearch.action.ValidateActions.addValidationError; /** - * Request for a reload secure settings action + * Request for a reload secure settings action. */ public class NodesReloadSecureSettingsRequest extends BaseNodesRequest { - /** - * The password which is broadcasted to all nodes, but is never stored on - * persistent storage. The password is used to reread and decrypt the contents - * of the node's keystore (backing the implementation of - * {@code SecureSettings}). - */ - private SecureString secureSettingsPassword; - public NodesReloadSecureSettingsRequest() { } /** - * Reload secure settings only on certain nodes, based on the nodes ids - * specified. If none are passed, secure settings will be reloaded on all the - * nodes. + * Reload secure settings only on certain nodes, based on the nodes IDs specified. If none are passed, secure settings will be reloaded + * on all the nodes. */ - public NodesReloadSecureSettingsRequest(String... nodesIds) { + public NodesReloadSecureSettingsRequest(final String... nodesIds) { super(nodesIds); } - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if (secureSettingsPassword == null) { - validationException = addValidationError("secure settings password cannot be null (use empty string instead)", - validationException); - } - return validationException; - } - - public SecureString secureSettingsPassword() { - return secureSettingsPassword; - } - - public NodesReloadSecureSettingsRequest secureStorePassword(SecureString secureStorePassword) { - this.secureSettingsPassword = secureStorePassword; - return this; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - final byte[] passwordBytes = in.readByteArray(); - try { - this.secureSettingsPassword = new SecureString(utf8BytesToChars(passwordBytes)); - } finally { - Arrays.fill(passwordBytes, (byte) 0); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - final byte[] passwordBytes = charsToUtf8Bytes(this.secureSettingsPassword.getChars()); - try { - out.writeByteArray(passwordBytes); - } finally { - Arrays.fill(passwordBytes, (byte) 0); - } - } - - /** - * Encodes the provided char[] to a UTF-8 byte[]. This is done while avoiding - * conversions to String. The provided char[] is not modified by this method, so - * the caller needs to take care of clearing the value if it is sensitive. 
- */ - private static byte[] charsToUtf8Bytes(char[] chars) { - final CharBuffer charBuffer = CharBuffer.wrap(chars); - final ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(charBuffer); - final byte[] bytes; - if (byteBuffer.hasArray()) { - // there is no guarantee that the byte buffers backing array is the right size - // so we need to make a copy - bytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit()); - Arrays.fill(byteBuffer.array(), (byte) 0); // clear sensitive data - } else { - final int length = byteBuffer.limit() - byteBuffer.position(); - bytes = new byte[length]; - byteBuffer.get(bytes); - // if the buffer is not read only we can reset and fill with 0's - if (byteBuffer.isReadOnly() == false) { - byteBuffer.clear(); // reset - for (int i = 0; i < byteBuffer.limit(); i++) { - byteBuffer.put((byte) 0); - } - } - } - return bytes; - } - - /** - * Decodes the provided byte[] to a UTF-8 char[]. This is done while avoiding - * conversions to String. The provided byte[] is not modified by this method, so - * the caller needs to take care of clearing the value if it is sensitive. - */ - public static char[] utf8BytesToChars(byte[] utf8Bytes) { - final ByteBuffer byteBuffer = ByteBuffer.wrap(utf8Bytes); - final CharBuffer charBuffer = StandardCharsets.UTF_8.decode(byteBuffer); - final char[] chars; - if (charBuffer.hasArray()) { - // there is no guarantee that the char buffers backing array is the right size - // so we need to make a copy - chars = Arrays.copyOfRange(charBuffer.array(), charBuffer.position(), charBuffer.limit()); - Arrays.fill(charBuffer.array(), (char) 0); // clear sensitive data - } else { - final int length = charBuffer.limit() - charBuffer.position(); - chars = new char[length]; - charBuffer.get(chars); - // if the buffer is not read only we can reset and fill with 0's - if (charBuffer.isReadOnly() == false) { - charBuffer.clear(); // reset - for (int i = 0; i < charBuffer.limit(); i++) { - charBuffer.put((char) 0); - } - } - } - return chars; - } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java index b5f2f73e56f..c8250455e6b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java @@ -19,19 +19,8 @@ package org.elasticsearch.action.admin.cluster.node.reload; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Objects; /** * Builder for the reload secure settings nodes request @@ -39,46 +28,8 @@ import java.util.Objects; public class NodesReloadSecureSettingsRequestBuilder extends NodesOperationRequestBuilder { - public static final String SECURE_SETTINGS_PASSWORD_FIELD_NAME = 
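The conversion helpers deleted above are not gone: the CharArraysTests at the top of this section exercise the same logic from its new shared home in CharArrays. The pattern worth keeping is to stay inside NIO buffers so no String is ever interned, and to zero every intermediate copy of the secret. A compact sketch of the encoding half, assuming Charset.encode hands back an array-backed heap buffer (which the deleted code also relied on via its hasArray() fast path):

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

static byte[] toUtf8Bytes(char[] chars) {
    final CharBuffer charBuffer = CharBuffer.wrap(chars);
    final ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(charBuffer);
    // the backing array may be larger than the encoded region, so copy it out...
    final byte[] bytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit());
    // ...and wipe the scratch buffer that still holds the secret
    Arrays.fill(byteBuffer.array(), (byte) 0);
    return bytes;
}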
"secure_settings_password"; - public NodesReloadSecureSettingsRequestBuilder(ElasticsearchClient client, NodesReloadSecureSettingsAction action) { super(client, action, new NodesReloadSecureSettingsRequest()); } - public NodesReloadSecureSettingsRequestBuilder setSecureStorePassword(SecureString secureStorePassword) { - request.secureStorePassword(secureStorePassword); - return this; - } - - public NodesReloadSecureSettingsRequestBuilder source(BytesReference source, XContentType xContentType) throws IOException { - Objects.requireNonNull(xContentType); - // EMPTY is ok here because we never call namedObject - try (InputStream stream = source.streamInput(); - XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, stream)) { - XContentParser.Token token; - token = parser.nextToken(); - if (token != XContentParser.Token.START_OBJECT) { - throw new ElasticsearchParseException("expected an object, but found token [{}]", token); - } - token = parser.nextToken(); - if (token != XContentParser.Token.FIELD_NAME || false == SECURE_SETTINGS_PASSWORD_FIELD_NAME.equals(parser.currentName())) { - throw new ElasticsearchParseException("expected a field named [{}], but found [{}]", SECURE_SETTINGS_PASSWORD_FIELD_NAME, - token); - } - token = parser.nextToken(); - if (token != XContentParser.Token.VALUE_STRING) { - throw new ElasticsearchParseException("expected field [{}] to be of type string, but found [{}] instead", - SECURE_SETTINGS_PASSWORD_FIELD_NAME, token); - } - final String password = parser.text(); - setSecureStorePassword(new SecureString(password.toCharArray())); - token = parser.nextToken(); - if (token != XContentParser.Token.END_OBJECT) { - throw new ElasticsearchParseException("expected end of object, but found token [{}]", token); - } - } - return this; - } - } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java index 0f44170fa60..b8a36bac68d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.KeyStoreWrapper; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.PluginsService; @@ -82,16 +81,13 @@ public class TransportNodesReloadSecureSettingsAction extends TransportNodesActi @Override protected NodesReloadSecureSettingsResponse.NodeResponse nodeOperation(NodeRequest nodeReloadRequest) { - final NodesReloadSecureSettingsRequest request = nodeReloadRequest.request; - final SecureString secureSettingsPassword = request.secureSettingsPassword(); try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configFile())) { // reread keystore from config file if (keystore == null) { return new NodesReloadSecureSettingsResponse.NodeResponse(clusterService.localNode(), new IllegalStateException("Keystore is missing")); } - // decrypt the keystore using the password from the request - 
keystore.decrypt(secureSettingsPassword.getChars()); + keystore.decrypt(new char[0]); // add the keystore to the original node settings object final Settings settingsWithKeystore = Settings.builder() .put(environment.settings(), false) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 921e472c94f..9cd8ef1f6ac 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; import java.io.Closeable; import java.io.IOException; @@ -421,6 +422,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable { MapperMergeValidator.validateFieldReferences(fieldMappers, fieldAliasMappers, fullPathObjectMappers, fieldTypes); + ContextMapping.validateContextPaths(indexSettings.getIndexVersionCreated(), fieldMappers, fieldTypes::get); + if (reason == MergeReason.MAPPING_UPDATE) { // this check will only be performed on the master node when there is // a call to the update mapping API. For all other cases like diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java index 0697871ea5d..2251615d678 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java @@ -59,7 +59,6 @@ public final class RestReloadSecureSettingsAction extends BaseRestHandler { .cluster() .prepareReloadSecureSettings() .setTimeout(request.param("timeout")) - .source(request.requiredContent(), request.getXContentType()) .setNodesIds(nodesIds); final NodesReloadSecureSettingsRequest nodesRequest = nodesRequestBuilder.request(); return channel -> nodesRequestBuilder @@ -68,12 +67,12 @@ public final class RestReloadSecureSettingsAction extends BaseRestHandler { public RestResponse buildResponse(NodesReloadSecureSettingsResponse response, XContentBuilder builder) throws Exception { builder.startObject(); - RestActions.buildNodesHeader(builder, channel.request(), response); - builder.field("cluster_name", response.getClusterName().value()); - response.toXContent(builder, channel.request()); + { + RestActions.buildNodesHeader(builder, channel.request(), response); + builder.field("cluster_name", response.getClusterName().value()); + response.toXContent(builder, channel.request()); + } builder.endObject(); - // clear password for the original request - nodesRequest.secureSettingsPassword().close(); return new BytesRestResponse(RestStatus.OK, builder); } }); diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java index 774dc95d399..0c34c59b7be 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -22,6 +22,8 @@ package org.elasticsearch.script; 
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; @@ -31,6 +33,25 @@ import java.util.List; import java.util.Map; public class ScriptedMetricAggContexts { + private static final DeprecationLogger DEPRECATION_LOGGER = + new DeprecationLogger(Loggers.getLogger(ScriptedMetricAggContexts.class)); + + // Public for access from tests + public static final String AGG_PARAM_DEPRECATION_WARNING = + "params._agg/_aggs for scripted metric aggregations are deprecated, use state/states (not in params) instead. " + + "Use -Des.aggregations.enable_scripted_metric_agg_param=false to disable."; + + public static boolean deprecatedAggParamEnabled() { + boolean enabled = Boolean.parseBoolean( + System.getProperty("es.aggregations.enable_scripted_metric_agg_param", "true")); + + if (enabled) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("enable_scripted_metric_agg_param", AGG_PARAM_DEPRECATION_WARNING); + } + + return enabled; + } + private abstract static class ParamsAndStateBase { private final Map params; private final Object state; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java index e35bf376aae..810126e8512 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java @@ -209,7 +209,10 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder (targetBuckets * roundings[roundingIdx].getMaximumInnerInterval()) + } while (requiredBuckets > (targetBuckets * roundings[currentRoundingIdx - 1].getMaximumInnerInterval()) && currentRoundingIdx < roundings.length); // The loop will increase past the correct rounding index here so we // need to subtract one to get the rounding index we need diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java index f4281c063ff..4124a8eeb76 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java @@ -96,7 +96,9 @@ public class InternalScriptedMetric extends InternalAggregation implements Scrip } // Add _aggs to params map for backwards compatibility (redundant with a context variable on the ReduceScript created below). 
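Since the escape hatch above is a plain system property that deprecatedAggParamEnabled re-reads on every call, either behavior can be pinned per JVM: the server/build.gradle change in this diff does exactly that for the dedicated v6-compatibility test tasks. A hypothetical test body (not part of the change) showing the toggle:

public void testLegacyAggParamCanBeDisabled() {
    // the gate re-reads the property each time, so the flip takes effect immediately
    System.setProperty("es.aggregations.enable_scripted_metric_agg_param", "false");
    assertFalse(ScriptedMetricAggContexts.deprecatedAggParamEnabled()); // and no deprecation warning is logged
}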
- params.put("_aggs", aggregationObjects); + if (ScriptedMetricAggContexts.deprecatedAggParamEnabled()) { + params.put("_aggs", aggregationObjects); + } ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService().compile( firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java index 9bd904a0701..076c29fecea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java @@ -83,10 +83,17 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory, since // it won't be possible to completely replace it with another type as is possible when it's an entry in params. - if (aggParams.containsKey("_agg") == false) { - aggParams.put("_agg", new HashMap()); + Object aggState = new HashMap(); + if (ScriptedMetricAggContexts.deprecatedAggParamEnabled()) { + if (aggParams.containsKey("_agg") == false) { + // Add _agg if it wasn't added manually + aggParams.put("_agg", aggState); + } else { + // If it was added manually, also use it for the agg context variable to reduce the likelihood of + // weird behavior due to multiple different variables. + aggState = aggParams.get("_agg"); + } } - Object aggState = aggParams.get("_agg"); final ScriptedMetricAggContexts.InitScript initScript = this.initScript.newInstance( mergeParams(aggParams, initScriptParams), aggState); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java index 049de439ac7..9483e76d072 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java @@ -220,7 +220,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder implements ToXContent */ protected abstract XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException; + /** + * Checks if the current context is consistent with the rest of the fields. For example, the GeoContext + * should check that the field that it points to has the correct type. 
+ */ + protected void validateReferences(Version indexVersionCreated, Function fieldResolver) { + // No validation is required by default + } + + /** + * Verifies that all field paths specified in contexts point to the fields with correct mappings + */ + public static void validateContextPaths(Version indexVersionCreated, List fieldMappers, + Function fieldResolver) { + for (FieldMapper fieldMapper : fieldMappers) { + if (CompletionFieldMapper.CONTENT_TYPE.equals(fieldMapper.typeName())) { + CompletionFieldMapper.CompletionFieldType fieldType = ((CompletionFieldMapper) fieldMapper).fieldType(); + if (fieldType.hasContextMappings()) { + for (ContextMapping context : fieldType.getContextMappings()) { + context.validateReferences(indexVersionCreated, fieldResolver); + } + } + } + } + } + @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(FIELD_NAME, name); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java index 3c0f0e80ceb..b4c3276b946 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java @@ -37,6 +37,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -50,7 +51,7 @@ import static org.elasticsearch.search.suggest.completion.context.ContextMapping * and creates context queries for defined {@link ContextMapping}s * for a {@link CompletionFieldMapper} */ -public class ContextMappings implements ToXContent { +public class ContextMappings implements ToXContent, Iterable> { private final List> contextMappings; private final Map> contextNameMap; @@ -97,6 +98,11 @@ public class ContextMappings implements ToXContent { document.add(new TypedContextField(name, input, weight, contexts, document)); } + @Override + public Iterator> iterator() { + return contextMappings.iterator(); + } + /** * Field prepends context values with a suggestion * Context values are associated with a type, denoted by diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index 48aaf705099..938c4963620 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -19,12 +19,17 @@ package org.elasticsearch.search.suggest.completion.context; +import org.apache.logging.log4j.LogManager; +import org.apache.lucene.document.LatLonDocValuesField; +import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import 
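Implementing Iterable over ContextMapping on ContextMappings is what lets the new validateContextPaths walk each completion field's contexts with a plain for-each. One caveat: returning contextMappings.iterator() hands out the internal list's own iterator, so a caller could structurally modify the mappings through remove(). If that ever matters, a defensive variant (an assumption, not the committed code) is cheap:

import java.util.Collections;
import java.util.Iterator;

@Override
public Iterator<ContextMapping<?>> iterator() {
    // read-only view: remove() now throws instead of mutating the shared list
    return Collections.unmodifiableList(contextMappings).iterator();
}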
org.elasticsearch.common.xcontent.XContentParser; @@ -42,6 +47,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; import static org.elasticsearch.common.geo.GeoHashUtils.addNeighbors; @@ -69,6 +75,8 @@ public class GeoContextMapping extends ContextMapping { static final String CONTEXT_PRECISION = "precision"; static final String CONTEXT_NEIGHBOURS = "neighbours"; + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(GeoContextMapping.class)); + private final int precision; private final String fieldName; @@ -205,11 +213,11 @@ public class GeoContextMapping extends ContextMapping { for (IndexableField field : fields) { if (field instanceof StringField) { spare.resetFromString(field.stringValue()); - } else { - // todo return this to .stringValue() once LatLonPoint implements it + geohashes.add(spare.geohash()); + } else if (field instanceof LatLonPoint || field instanceof LatLonDocValuesField) { spare.resetFromIndexableField(field); + geohashes.add(spare.geohash()); } - geohashes.add(spare.geohash()); } } } @@ -279,6 +287,32 @@ public class GeoContextMapping extends ContextMapping { return internalQueryContextList; } + @Override + protected void validateReferences(Version indexVersionCreated, Function fieldResolver) { + if (fieldName != null) { + MappedFieldType mappedFieldType = fieldResolver.apply(fieldName); + if (mappedFieldType == null) { + if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("geo_context_mapping", + "field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name); + } else { + throw new ElasticsearchParseException( + "field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name); + } + } else if (GeoPointFieldMapper.CONTENT_TYPE.equals(mappedFieldType.typeName()) == false) { + if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("geo_context_mapping", + "field [{}] referenced in context [{}] must be mapped to geo_point, found [{}]", + fieldName, name, mappedFieldType.typeName()); + } else { + throw new ElasticsearchParseException( + "field [{}] referenced in context [{}] must be mapped to geo_point, found [{}]", + fieldName, name, mappedFieldType.typeName()); + } + } + } + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index 107a4b32d89..a12f27c93e3 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.transport; +import java.util.function.Supplier; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.ClusterNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -48,9 +49,20 @@ public abstract class RemoteClusterAware extends AbstractComponent { /** * A list of initial seed nodes to discover eligible nodes from the remote cluster */ - public static final Setting.AffixSetting> REMOTE_CLUSTERS_SEEDS = Setting.affixKeySetting("search.remote.", - "seeds", (key) -> Setting.listSetting(key, Collections.emptyList(), RemoteClusterAware::parseSeedAddress, - 
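GeoContextMapping.validateReferences above uses the standard mapping-compatibility split: indices created before 7.0 only log a deprecation warning for a broken geo context reference, while newer indices reject the mapping outright. The shape, distilled (the helper is illustrative; DEPRECATION_LOGGER, Version and ElasticsearchParseException are the real names from the hunk):

static void warnOrThrow(Version indexCreated, String key, String message, Object... args) {
    if (indexCreated.before(Version.V_7_0_0_alpha1)) {
        DEPRECATION_LOGGER.deprecatedAndMaybeLog(key, message, args); // legacy index: keep working, warn once
    } else {
        throw new ElasticsearchParseException(message, args);         // new index: fail the mapping update
    }
}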
Setting.Property.NodeScope, Setting.Property.Dynamic)); + public static final Setting.AffixSetting> REMOTE_CLUSTERS_SEEDS = Setting.affixKeySetting( + "search.remote.", + "seeds", + key -> Setting.listSetting( + key, Collections.emptyList(), + s -> { + // validate seed address + parsePort(s); + return s; + }, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ) + ); public static final char REMOTE_CLUSTER_INDEX_SEPARATOR = ':'; public static final String LOCAL_CLUSTER_GROUP_KEY = ""; @@ -65,18 +77,20 @@ public abstract class RemoteClusterAware extends AbstractComponent { this.clusterNameResolver = new ClusterNameExpressionResolver(settings); } - protected static Map> buildRemoteClustersSeeds(Settings settings) { - Stream>> allConcreteSettings = REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings); + protected static Map>> buildRemoteClustersSeeds(Settings settings) { + Stream>> allConcreteSettings = REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings); return allConcreteSettings.collect( Collectors.toMap(REMOTE_CLUSTERS_SEEDS::getNamespace, concreteSetting -> { String clusterName = REMOTE_CLUSTERS_SEEDS.getNamespace(concreteSetting); - List nodes = new ArrayList<>(); - for (InetSocketAddress address : concreteSetting.get(settings)) { - TransportAddress transportAddress = new TransportAddress(address); - DiscoveryNode node = new DiscoveryNode(clusterName + "#" + transportAddress.toString(), - transportAddress, - Version.CURRENT.minimumCompatibilityVersion()); - nodes.add(node); + List addresses = concreteSetting.get(settings); + List> nodes = new ArrayList<>(addresses.size()); + for (String address : addresses) { + nodes.add(() -> { + TransportAddress transportAddress = new TransportAddress(RemoteClusterAware.parseSeedAddress(address)); + return new DiscoveryNode(clusterName + "#" + transportAddress.toString(), + transportAddress, + Version.CURRENT.minimumCompatibilityVersion()); + }); } return nodes; })); @@ -128,7 +142,7 @@ public abstract class RemoteClusterAware extends AbstractComponent { * Subclasses must implement this to receive information about updated cluster aliases. If the given address list is * empty the cluster alias is unregistered and should be removed. */ - protected abstract void updateRemoteCluster(String clusterAlias, List addresses); + protected abstract void updateRemoteCluster(String clusterAlias, List addresses); /** * Registers this instance to listen to updates on the cluster settings. @@ -138,29 +152,37 @@ public abstract class RemoteClusterAware extends AbstractComponent { (namespace, value) -> {}); } - private static InetSocketAddress parseSeedAddress(String remoteHost) { - int portSeparator = remoteHost.lastIndexOf(':'); // in case we have a IPv6 address ie. 
[::1]:9300 - if (portSeparator == -1 || portSeparator == remoteHost.length()) { - throw new IllegalArgumentException("remote hosts need to be configured as [host:port], found [" + remoteHost + "] instead"); - } - String host = remoteHost.substring(0, portSeparator); + protected static InetSocketAddress parseSeedAddress(String remoteHost) { + String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost)); InetAddress hostAddress; try { hostAddress = InetAddress.getByName(host); } catch (UnknownHostException e) { throw new IllegalArgumentException("unknown host [" + host + "]", e); } + return new InetSocketAddress(hostAddress, parsePort(remoteHost)); + } + + private static int parsePort(String remoteHost) { try { - int port = Integer.valueOf(remoteHost.substring(portSeparator + 1)); + int port = Integer.valueOf(remoteHost.substring(indexOfPortSeparator(remoteHost) + 1)); if (port <= 0) { throw new IllegalArgumentException("port number must be > 0 but was: [" + port + "]"); } - return new InetSocketAddress(hostAddress, port); + return port; } catch (NumberFormatException e) { - throw new IllegalArgumentException("port must be a number", e); + throw new IllegalArgumentException("failed to parse port", e); } } + private static int indexOfPortSeparator(String remoteHost) { + int portSeparator = remoteHost.lastIndexOf(':'); // in case we have a IPv6 address ie. [::1]:9300 + if (portSeparator == -1 || portSeparator == remoteHost.length()) { + throw new IllegalArgumentException("remote hosts need to be configured as [host:port], found [" + remoteHost + "] instead"); + } + return portSeparator; + } + public static String buildRemoteIndexName(String clusterAlias, String indexName) { return clusterAlias != null ? clusterAlias + REMOTE_CLUSTER_INDEX_SEPARATOR + indexName : indexName; } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 67c0e1a5aa6..15cf7899dc0 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.transport; +import java.util.function.Supplier; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.SetOnce; @@ -84,7 +85,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo private final String clusterAlias; private final int maxNumRemoteConnections; private final Predicate nodePredicate; - private volatile List seedNodes; + private volatile List> seedNodes; private volatile boolean skipUnavailable; private final ConnectHandler connectHandler; private SetOnce remoteClusterName = new SetOnce<>(); @@ -99,7 +100,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo * @param maxNumRemoteConnections the maximum number of connections to the remote cluster * @param nodePredicate a predicate to filter eligible remote nodes to connect to */ - RemoteClusterConnection(Settings settings, String clusterAlias, List seedNodes, + RemoteClusterConnection(Settings settings, String clusterAlias, List> seedNodes, TransportService transportService, int maxNumRemoteConnections, Predicate nodePredicate) { super(settings); this.localClusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); @@ -127,7 +128,7 @@ final class RemoteClusterConnection extends 
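The parsing split above (indexOfPortSeparator feeding both parseSeedAddress and parsePort) hinges on taking the last colon, because IPv6 literals contain colons of their own. A worked example of the arithmetic:

import java.net.InetAddress;
import java.net.InetSocketAddress;

static InetSocketAddress parseBracketedSeed() throws Exception {
    String remoteHost = "[::1]:9300";
    int sep = remoteHost.lastIndexOf(':');                      // 5 — the separator, not the address's own colons
    String host = remoteHost.substring(0, sep);                 // "[::1]"; InetAddress.getByName accepts the brackets
    int port = Integer.parseInt(remoteHost.substring(sep + 1)); // 9300
    return new InetSocketAddress(InetAddress.getByName(host), port);
}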
AbstractComponent implements Transpo /** * Updates the list of seed nodes for this cluster connection */ - synchronized void updateSeedNodes(List seedNodes, ActionListener connectListener) { + synchronized void updateSeedNodes(List> seedNodes, ActionListener connectListener) { this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes)); connectHandler.connect(connectListener); } @@ -456,7 +457,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo }); } - void collectRemoteNodes(Iterator seedNodes, + private void collectRemoteNodes(Iterator> seedNodes, final TransportService transportService, ActionListener listener) { if (Thread.currentThread().isInterrupted()) { listener.onFailure(new InterruptedException("remote connect thread got interrupted")); @@ -464,7 +465,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo try { if (seedNodes.hasNext()) { cancellableThreads.executeIO(() -> { - final DiscoveryNode seedNode = seedNodes.next(); + final DiscoveryNode seedNode = seedNodes.next().get(); final TransportService.HandshakeResponse handshakeResponse; Transport.Connection connection = transportService.openConnection(seedNode, ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null)); @@ -554,11 +555,11 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo private final TransportService transportService; private final Transport.Connection connection; private final ActionListener listener; - private final Iterator seedNodes; + private final Iterator> seedNodes; private final CancellableThreads cancellableThreads; SniffClusterStateResponseHandler(TransportService transportService, Transport.Connection connection, - ActionListener listener, Iterator seedNodes, + ActionListener listener, Iterator> seedNodes, CancellableThreads cancellableThreads) { this.transportService = transportService; this.connection = connection; @@ -651,7 +652,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo * Get the information about remote nodes to be rendered on {@code _remote/info} requests. 
*/ public RemoteConnectionInfo getConnectionInfo() { - List seedNodeAddresses = seedNodes.stream().map(DiscoveryNode::getAddress).collect(Collectors.toList()); + List seedNodeAddresses = seedNodes.stream().map(node -> node.get().getAddress()).collect(Collectors.toList()); TimeValue initialConnectionTimeout = RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings); return new RemoteConnectionInfo(clusterAlias, seedNodeAddresses, maxNumRemoteConnections, connectedNodes.size(), initialConnectionTimeout, skipUnavailable); diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index a07de63d537..956a0d94179 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.transport; +import java.util.function.Supplier; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; @@ -40,7 +41,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.io.IOException; -import java.net.InetSocketAddress; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -115,7 +115,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl * @param seeds a cluster alias to discovery node mapping representing the remote clusters seeds nodes * @param connectionListener a listener invoked once every configured cluster has been connected to */ - private synchronized void updateRemoteClusters(Map> seeds, ActionListener connectionListener) { + private synchronized void updateRemoteClusters(Map>> seeds, + ActionListener connectionListener) { if (seeds.containsKey(LOCAL_CLUSTER_GROUP_KEY)) { throw new IllegalArgumentException("remote clusters must not have the empty string as its key"); } @@ -125,7 +126,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl } else { CountDown countDown = new CountDown(seeds.size()); remoteClusters.putAll(this.remoteClusters); - for (Map.Entry> entry : seeds.entrySet()) { + for (Map.Entry>> entry : seeds.entrySet()) { RemoteClusterConnection remote = this.remoteClusters.get(entry.getKey()); if (entry.getValue().isEmpty()) { // with no seed nodes we just remove the connection try { @@ -310,16 +311,17 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl } } - protected void updateRemoteCluster(String clusterAlias, List addresses) { + @Override + protected void updateRemoteCluster(String clusterAlias, List addresses) { updateRemoteCluster(clusterAlias, addresses, ActionListener.wrap((x) -> {}, (x) -> {})); } void updateRemoteCluster( final String clusterAlias, - final List addresses, + final List addresses, final ActionListener connectionListener) { - final List nodes = addresses.stream().map(address -> { - final TransportAddress transportAddress = new TransportAddress(address); + final List> nodes = addresses.stream().>map(address -> () -> { + final TransportAddress transportAddress = new TransportAddress(RemoteClusterAware.parseSeedAddress(address)); final String id = clusterAlias + "#" + transportAddress.toString(); final Version version = Version.CURRENT.minimumCompatibilityVersion(); return new DiscoveryNode(id, transportAddress, version); @@ -334,7 +336,7 @@ public final class 
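The type change threaded through RemoteClusterAware, RemoteClusterConnection and RemoteClusterService above — a list of DiscoveryNode becoming a list of Supplier of DiscoveryNode — defers DNS resolution from settings-parse time to connect time: the setting validator now only checks the port syntax eagerly, and InetAddress.getByName runs freshly each time a seed is actually dialed. The essence as a standalone sketch (condensed from the hunks; the names are the real ones):

import java.util.function.Supplier;

static Supplier<DiscoveryNode> lazySeedNode(String clusterAlias, String address) {
    return () -> { // nothing touches DNS until the connect path calls get()
        TransportAddress transportAddress = new TransportAddress(RemoteClusterAware.parseSeedAddress(address));
        return new DiscoveryNode(clusterAlias + "#" + transportAddress.toString(),
                transportAddress, Version.CURRENT.minimumCompatibilityVersion());
    };
}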
RemoteClusterService extends RemoteClusterAware implements Cl void initializeRemoteClusters() { final TimeValue timeValue = REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings); final PlainActionFuture future = new PlainActionFuture<>(); - Map> seeds = RemoteClusterAware.buildRemoteClustersSeeds(settings); + Map>> seeds = RemoteClusterAware.buildRemoteClustersSeeds(settings); updateRemoteClusters(seeds, future); try { future.get(timeValue.millis(), TimeUnit.MILLISECONDS); diff --git a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java index 79527582405..3f9e258ffec 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -20,11 +20,9 @@ package org.elasticsearch.action.admin; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse; import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureSettings; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; @@ -44,11 +42,11 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.containsString; public class ReloadSecureSettingsIT extends ESIntegTestCase { @@ -62,7 +60,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { Files.deleteIfExists(KeyStoreWrapper.keystorePath(environment.configFile())); final int initialReloadCount = mockReloadablePlugin.getReloadCount(); final CountDownLatch latch = new CountDownLatch(1); - client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + client().admin().cluster().prepareReloadSecureSettings().execute( new ActionListener() { @Override public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { @@ -96,44 +94,6 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); } - public void testNullKeystorePassword() throws Exception { - final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); - final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) - .stream().findFirst().get(); - final AtomicReference reloadSettingsError = new AtomicReference<>(); - final int initialReloadCount = mockReloadablePlugin.getReloadCount(); - final CountDownLatch latch = new CountDownLatch(1); - client().admin().cluster().prepareReloadSecureSettings().execute( - new ActionListener() { - @Override - public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { - try { - reloadSettingsError.set(new AssertionError("Null keystore password should fail")); - } finally { - latch.countDown(); - } - } - - 
@Override - public void onFailure(Exception e) { - try { - assertThat(e, instanceOf(ActionRequestValidationException.class)); - assertThat(e.getMessage(), containsString("secure settings password cannot be null")); - } catch (final AssertionError ae) { - reloadSettingsError.set(ae); - } finally { - latch.countDown(); - } - } - }); - latch.await(); - if (reloadSettingsError.get() != null) { - throw reloadSettingsError.get(); - } - // in the null password case no reload should be triggered - assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); - } - public void testInvalidKeystoreFile() throws Exception { final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) @@ -149,7 +109,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { Files.copy(keystore, KeyStoreWrapper.keystorePath(environment.configFile()), StandardCopyOption.REPLACE_EXISTING); } final CountDownLatch latch = new CountDownLatch(1); - client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + client().admin().cluster().prepareReloadSecureSettings().execute( new ActionListener() { @Override public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { @@ -181,52 +141,6 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); } - public void testWrongKeystorePassword() throws Exception { - final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); - final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) - .stream().findFirst().get(); - final Environment environment = internalCluster().getInstance(Environment.class); - final AtomicReference reloadSettingsError = new AtomicReference<>(); - final int initialReloadCount = mockReloadablePlugin.getReloadCount(); - // "some" keystore should be present in this case - writeEmptyKeystore(environment, new char[0]); - final CountDownLatch latch = new CountDownLatch(1); - client().admin() - .cluster() - .prepareReloadSecureSettings() - .setSecureStorePassword(new SecureString(new char[] { 'W', 'r', 'o', 'n', 'g' })) - .execute(new ActionListener() { - @Override - public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { - try { - assertThat(nodesReloadResponse, notNullValue()); - final Map nodesMap = nodesReloadResponse.getNodesMap(); - assertThat(nodesMap.size(), equalTo(cluster().size())); - for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { - assertThat(nodeResponse.reloadException(), notNullValue()); - assertThat(nodeResponse.reloadException(), instanceOf(SecurityException.class)); - } - } catch (final AssertionError e) { - reloadSettingsError.set(e); - } finally { - latch.countDown(); - } - } - - @Override - public void onFailure(Exception e) { - reloadSettingsError.set(new AssertionError("Nodes request failed", e)); - latch.countDown(); - } - }); - latch.await(); - if (reloadSettingsError.get() != null) { - throw reloadSettingsError.get(); - } - // in the wrong password case no reload should be triggered - assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); - } - public void testMisbehavingPlugin() throws Exception { final Environment environment = 
internalCluster().getInstance(Environment.class); final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); @@ -247,7 +161,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { .get(Settings.builder().put(environment.settings()).setSecureSettings(secureSettings).build()) .toString(); final CountDownLatch latch = new CountDownLatch(1); - client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + client().admin().cluster().prepareReloadSecureSettings().execute( new ActionListener() { @Override public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { @@ -314,7 +228,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { private void successfulReloadCall() throws InterruptedException { final AtomicReference reloadSettingsError = new AtomicReference<>(); final CountDownLatch latch = new CountDownLatch(1); - client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + client().admin().cluster().prepareReloadSecureSettings().execute( new ActionListener() { @Override public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { diff --git a/server/src/test/java/org/elasticsearch/cluster/ack/AckIT.java b/server/src/test/java/org/elasticsearch/cluster/ack/AckIT.java index 2cd8a2c27c7..df97854cc35 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ack/AckIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/ack/AckIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.ack; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -50,6 +51,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @ClusterScope(minNumDataNodes = 2) +@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/32767") public class AckIT extends ESIntegTestCase { @Override diff --git a/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java b/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java index 33ea83c7592..f188eb4cac6 100644 --- a/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java +++ b/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java @@ -69,7 +69,7 @@ public class DateTimeUnitTests extends ESTestCase { public void testConversion() { long millis = randomLongBetween(0, Instant.now().toEpochMilli()); DateTimeZone zone = randomDateTimeZone(); - ZoneId zoneId = ZoneId.of(zone.getID()); + ZoneId zoneId = zone.toTimeZone().toZoneId(); int offsetSeconds = zoneId.getRules().getOffset(Instant.ofEpochMilli(millis)).getTotalSeconds(); long parsedMillisJavaTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), zoneId) diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 5091af5a540..50f95bf4d47 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2400,8 +2400,7 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(sourceShard, targetShard); } - @AwaitsFix(bugUrl = 
"https://github.com/elastic/elasticsearch/issues/32766") - public void testDocStats() throws IOException { + public void testDocStats() throws IOException, InterruptedException { IndexShard indexShard = null; try { indexShard = newStartedShard( @@ -2460,15 +2459,6 @@ public class IndexShardTests extends IndexShardTestCase { assertTrue(searcher.reader().numDocs() <= docStats.getCount()); } assertThat(docStats.getCount(), equalTo(numDocs)); - // Lucene will delete a segment if all docs are deleted from it; - // this means that we lose the deletes when deleting all docs. - // If soft-delete is enabled, each delete op will add a deletion marker. - final long deleteTombstones = indexShard.indexSettings.isSoftDeleteEnabled() ? numDocsToDelete : 0L; - if (numDocsToDelete == numDocs) { - assertThat(docStats.getDeleted(), equalTo(deleteTombstones)); - } else { - assertThat(docStats.getDeleted(), equalTo(numDocsToDelete + deleteTombstones)); - } } // merge them away diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index fa19702d39b..3bb79959033 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -44,7 +44,6 @@ import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.SnapshotMatchers; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.HashMap; import java.util.List; @@ -75,7 +74,7 @@ public class RecoveryTests extends ESIndexLevelReplicationTestCase { } } - @TestLogging("_root:TRACE") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32089") public void testRetentionPolicyChangeDuringRecovery() throws Exception { try (ReplicationGroup shards = createGroup(0)) { shards.startPrimary(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java index 327a717f05c..bdfdd4d028f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java @@ -178,4 +178,18 @@ public class FiltersTests extends BaseAggregationTestCase 0L))); } + + public void testRewritePreservesOtherBucket() throws IOException { + FiltersAggregationBuilder originalFilters = new FiltersAggregationBuilder("my-agg", new BoolQueryBuilder()); + originalFilters.otherBucket(randomBoolean()); + originalFilters.otherBucketKey(randomAlphaOfLength(10)); + + AggregationBuilder rewritten = originalFilters.rewrite(new QueryRewriteContext(xContentRegistry(), + null, null, () -> 0L)); + assertThat(rewritten, instanceOf(FiltersAggregationBuilder.class)); + + FiltersAggregationBuilder rewrittenFilters = (FiltersAggregationBuilder) rewritten; + assertEquals(originalFilters.otherBucket(), rewrittenFilters.otherBucket()); + assertEquals(originalFilters.otherBucketKey(), rewrittenFilters.otherBucketKey()); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index 981d263d7d6..b7c5bf03ac5 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.rounding.DateTimeUnit; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; @@ -28,7 +29,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHi import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import java.time.Instant; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -39,6 +44,8 @@ import java.util.TreeMap; import static org.elasticsearch.common.unit.TimeValue.timeValueHours; import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; +import static org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.createRounding; +import static org.hamcrest.Matchers.equalTo; public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase { @@ -61,6 +68,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati int nbBuckets = randomNumberOfBuckets(); int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1); List buckets = new ArrayList<>(nbBuckets); + long startingDate = System.currentTimeMillis(); long interval = randomIntBetween(1, 3); @@ -72,23 +80,41 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati } InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList()); BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations); - - return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData); } + /* + This test was added to reproduce a bug where getAppropriateRounding was only ever using the first innerIntervals + passed in, instead of using the interval associated with the loop. + */ + public void testGetAppropriateRoundingUsesCorrectIntervals() { + RoundingInfo[] roundings = new RoundingInfo[6]; + DateTimeZone timeZone = DateTimeZone.UTC; + // Since we pass 0 as the starting index to getAppropriateRounding, we'll also use + // an innerInterval that is quite large, such that targetBuckets * roundings[i].getMaximumInnerInterval() + // will be larger than the estimate. 
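To make the numbers in the test below concrete: each RoundingInfo pairs a rounding unit with its rough duration in milliseconds and the inner intervals (multiples of the unit) the histogram may use as bucket widths. The fit check sketched here is an assumption reconstructed from the test's inputs and its expected result of index 2, not the verbatim production code:

```java
public class RoundingSelectionDemo {
    // Assumed fit test: the range, measured in units of the rounding being tried,
    // must not exceed targetBuckets * (that rounding's largest inner interval).
    static boolean fits(long rangeMillis, long unitMillis, int maxInnerInterval, int targetBuckets) {
        return rangeMillis / unitMillis <= (long) targetBuckets * maxInnerInterval;
    }

    public static void main(String[] args) {
        long oneDay = 24L * 60 * 60 * 1000; // the range used by the test below
        int targetBuckets = 25;
        System.out.println(fits(oneDay, 1_000L, 1000, targetBuckets));    // seconds: 86400 > 25000 -> false
        System.out.println(fits(oneDay, 60_000L, 30, targetBuckets));     // minutes: 1440 > 750    -> false
        System.out.println(fits(oneDay, 3_600_000L, 12, targetBuckets));  // hours:   24 <= 300     -> true, so index 2
        // Under the bug, every rounding was checked against index 0's inner intervals (max 1000),
        // so minutes already appeared to fit (1440 <= 25000) and the result would have been 1, not 2.
    }
}
```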
+ roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone), + 1000L, 1000); + roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone), + 60 * 1000L, 1, 5, 10, 30); + roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone), + 60 * 60 * 1000L, 1, 3, 12); + + OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC); + // We want to pass a roundingIdx of zero, because in order to reproduce this bug, we need the function + // to increment the rounding (because the bug was that the function would not use the innerIntervals + // from the new rounding). + int result = InternalAutoDateHistogram.getAppropriateRounding(timestamp.toEpochSecond()*1000, + timestamp.plusDays(1).toEpochSecond()*1000, 0, roundings, 25); + assertThat(result, equalTo(2)); + } + @Override protected void assertReduced(InternalAutoDateHistogram reduced, List inputs) { - int roundingIdx = 0; - for (InternalAutoDateHistogram histogram : inputs) { - if (histogram.getBucketInfo().roundingIdx > roundingIdx) { - roundingIdx = histogram.getBucketInfo().roundingIdx; - } - } - RoundingInfo roundingInfo = roundingInfos[roundingIdx]; long lowest = Long.MAX_VALUE; long highest = 0; + for (InternalAutoDateHistogram histogram : inputs) { for (Histogram.Bucket bucket : histogram.getBuckets()) { long bucketKey = ((DateTime) bucket.getKey()).getMillis(); @@ -100,35 +126,72 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati } } } + + int roundingIndex = reduced.getBucketInfo().roundingIdx; + RoundingInfo roundingInfo = roundingInfos[roundingIndex]; + long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis(); - long innerIntervalToUse = 0; - for (int interval : roundingInfo.innerIntervals) { - if (normalizedDuration / interval < maxNumberOfBuckets()) { - innerIntervalToUse = interval; + long innerIntervalToUse = roundingInfo.innerIntervals[0]; + int innerIntervalIndex = 0; + + // First, try to calculate the correct innerInterval using the normalizedDuration. + // This handles cases where highest and lowest are further apart than the interval being used. + if (normalizedDuration != 0) { + for (int j = roundingInfo.innerIntervals.length-1; j >= 0; j--) { + int interval = roundingInfo.innerIntervals[j]; + if (normalizedDuration / interval < reduced.getBuckets().size()) { + innerIntervalToUse = interval; + innerIntervalIndex = j; + } } } + + long intervalInMillis = innerIntervalToUse * roundingInfo.getRoughEstimateDurationMillis(); + int bucketCount = getBucketCount(lowest, highest, roundingInfo, intervalInMillis); + + // Next, if our bucketCount is still above what we need, we'll go back and determine the interval + // based on a size calculation.
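An aside before the refinement step that follows: the bucket count it relies on (computed by the getBucketCount helper defined at the end of this test) steps intervalInMillis from round(lowest) to round(highest) inclusive, which is equivalent to a closed-form division. A minimal sketch, assuming both bounds are already rounded and the interval is positive:

```java
public class BucketCountDemo {
    // Loop form, mirroring the shape of the test's getBucketCount helper.
    static int loopCount(long roundedLowest, long roundedHighest, long intervalMillis) {
        int count = 0;
        for (long key = roundedLowest; key <= roundedHighest; key += intervalMillis) {
            count++;
        }
        return count;
    }

    // Equivalent closed form for the same inclusive stepping.
    static long closedForm(long roundedLowest, long roundedHighest, long intervalMillis) {
        return (roundedHighest - roundedLowest) / intervalMillis + 1;
    }

    public static void main(String[] args) {
        // 0 .. 86,400,000 ms stepped hourly, endpoint inclusive: 24 steps + 1 = 25 buckets.
        System.out.println(loopCount(0L, 86_400_000L, 3_600_000L));   // 25
        System.out.println(closedForm(0L, 86_400_000L, 3_600_000L)); // 25
    }
}
```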
+ if (bucketCount > reduced.getBuckets().size()) { + for (int i = innerIntervalIndex; i < roundingInfo.innerIntervals.length; i++) { + long newIntervalMillis = roundingInfo.innerIntervals[i] * roundingInfo.getRoughEstimateDurationMillis(); + if (getBucketCount(lowest, highest, roundingInfo, newIntervalMillis) <= reduced.getBuckets().size()) { + innerIntervalToUse = roundingInfo.innerIntervals[i]; + intervalInMillis = innerIntervalToUse * roundingInfo.getRoughEstimateDurationMillis(); + } + } + } + Map expectedCounts = new TreeMap<>(); - long intervalInMillis = innerIntervalToUse*roundingInfo.getRoughEstimateDurationMillis(); for (long keyForBucket = roundingInfo.rounding.round(lowest); - keyForBucket <= highest; + keyForBucket <= roundingInfo.rounding.round(highest); keyForBucket = keyForBucket + intervalInMillis) { expectedCounts.put(keyForBucket, 0L); + // Iterate through the input buckets, and for each bucket, determine if it's inside + // the range of the bucket in the outer loop. If it is, add the doc count to the total + // for that bucket. + for (InternalAutoDateHistogram histogram : inputs) { for (Histogram.Bucket bucket : histogram.getBuckets()) { - long bucketKey = ((DateTime) bucket.getKey()).getMillis(); - long roundedBucketKey = roundingInfo.rounding.round(bucketKey); + long roundedBucketKey = roundingInfo.rounding.round(((DateTime) bucket.getKey()).getMillis()); + long docCount = bucket.getDocCount(); if (roundedBucketKey >= keyForBucket && roundedBucketKey < keyForBucket + intervalInMillis) { - long count = bucket.getDocCount(); expectedCounts.compute(keyForBucket, - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + count); + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount); } } } } + // If there is only a single bucket, and we haven't added it above, add a bucket with no documents. + // This step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above.
+ if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) { + expectedCounts.put(roundingInfo.rounding.round(lowest), 0L); + } + + // pick out the actual reduced values to make the assertion more readable Map actualCounts = new TreeMap<>(); for (Histogram.Bucket bucket : reduced.getBuckets()) { actualCounts.compute(((DateTime) bucket.getKey()).getMillis(), @@ -137,12 +200,16 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati assertEquals(expectedCounts, actualCounts); } - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215") - public void testReduceRandom() { - super.testReduceRandom(); + private int getBucketCount(long lowest, long highest, RoundingInfo roundingInfo, long intervalInMillis) { + int bucketCount = 0; + for (long keyForBucket = roundingInfo.rounding.round(lowest); + keyForBucket <= roundingInfo.rounding.round(highest); + keyForBucket = keyForBucket + intervalInMillis) { + bucketCount++; + } + return bucketCount; } - + @Override protected Writeable.Reader instanceReader() { return InternalAutoDateHistogram::new; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 13e14897959..c000b7fb228 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -67,6 +67,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; @@ -90,42 +91,57 @@ public class ScriptedMetricIT extends ESIntegTestCase { protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); - scripts.put("_agg['count'] = 1", vars -> - aggScript(vars, agg -> ((Map) agg).put("count", 1))); + scripts.put("state['count'] = 1", vars -> + aggScript(vars, state -> state.put("count", 1))); - scripts.put("_agg.add(1)", vars -> - aggScript(vars, agg -> ((List) agg).add(1))); + scripts.put("state.list.add(1)", vars -> + aggScript(vars, state -> { + // Lazily populate state.list for tests without an init script + if (state.containsKey("list") == false) { + state.put("list", new ArrayList()); + } - scripts.put("_agg[param1] = param2", vars -> - aggScript(vars, agg -> ((Map) agg).put(XContentMapValues.extractValue("params.param1", vars), + ((List) state.get("list")).add(1); + })); + + scripts.put("state[param1] = param2", vars -> + aggScript(vars, state -> state.put((String) XContentMapValues.extractValue("params.param1", vars), XContentMapValues.extractValue("params.param2", vars)))); scripts.put("vars.multiplier = 3", vars -> ((Map) vars.get("vars")).put("multiplier", 3)); - scripts.put("_agg.add(vars.multiplier)", vars -> - aggScript(vars, agg -> ((List) agg).add(XContentMapValues.extractValue("vars.multiplier", vars)))); + scripts.put("state.list.add(vars.multiplier)", vars -> + aggScript(vars, state -> { + // Lazily populate state.list for tests without an init script + if (state.containsKey("list") == false) { + state.put("list", new ArrayList()); + } + +
((List) state.get("list")).add(XContentMapValues.extractValue("vars.multiplier", vars)); + })); // Equivalent to: // // newaggregation = []; // sum = 0; // - // for (a in _agg) { - // sum += a + // for (s in state.list) { + // sum += s // }; // // newaggregation.add(sum); // return newaggregation" // - scripts.put("sum agg values as a new aggregation", vars -> { + scripts.put("sum state values as a new aggregation", vars -> { List newAggregation = new ArrayList(); - List agg = (List) vars.get("_agg"); + Map state = (Map) vars.get("state"); + List list = (List) state.get("list"); - if (agg != null) { + if (list != null) { Integer sum = 0; - for (Object a : (List) agg) { - sum += ((Number) a).intValue(); + for (Object s : list) { + sum += ((Number) s).intValue(); } newAggregation.add(sum); } @@ -137,24 +153,41 @@ public class ScriptedMetricIT extends ESIntegTestCase { // newaggregation = []; // sum = 0; // - // for (aggregation in _aggs) { - // for (a in aggregation) { - // sum += a + // for (state in states) { + // for (s in state) { + // sum += s // } // }; // // newaggregation.add(sum); // return newaggregation" // - scripts.put("sum aggs of agg values as a new aggregation", vars -> { + scripts.put("sum all states (lists) values as a new aggregation", vars -> { List newAggregation = new ArrayList(); Integer sum = 0; - List aggs = (List) vars.get("_aggs"); - for (Object aggregation : (List) aggs) { - if (aggregation != null) { - for (Object a : (List) aggregation) { - sum += ((Number) a).intValue(); + List> states = (List>) vars.get("states"); + for (List list : states) { + if (list != null) { + for (Object s : list) { + sum += ((Number) s).intValue(); + } + } + } + newAggregation.add(sum); + return newAggregation; + }); + + scripts.put("sum all states' state.list values as a new aggregation", vars -> { + List newAggregation = new ArrayList(); + Integer sum = 0; + + List> states = (List>) vars.get("states"); + for (Map state : states) { + List list = (List) state.get("list"); + if (list != null) { + for (Object s : list) { + sum += ((Number) s).intValue(); } } } @@ -167,25 +200,25 @@ public class ScriptedMetricIT extends ESIntegTestCase { // newaggregation = []; // sum = 0; // - // for (aggregation in _aggs) { - // for (a in aggregation) { - // sum += a + // for (state in states) { + // for (s in state) { + // sum += s // } // }; // // newaggregation.add(sum * multiplier); // return newaggregation" // - scripts.put("multiplied sum aggs of agg values as a new aggregation", vars -> { + scripts.put("multiplied sum all states (lists) values as a new aggregation", vars -> { Integer multiplier = (Integer) vars.get("multiplier"); List newAggregation = new ArrayList(); Integer sum = 0; - List aggs = (List) vars.get("_aggs"); - for (Object aggregation : (List) aggs) { - if (aggregation != null) { - for (Object a : (List) aggregation) { - sum += ((Number) a).intValue(); + List> states = (List>) vars.get("states"); + for (List list : states) { + if (list != null) { + for (Object s : list) { + sum += ((Number) s).intValue(); } } } @@ -193,53 +226,12 @@ public class ScriptedMetricIT extends ESIntegTestCase { return newAggregation; }); - scripts.put("state.items = new ArrayList()", vars -> - aggContextScript(vars, state -> ((HashMap) state).put("items", new ArrayList()))); - - scripts.put("state.items.add(1)", vars -> - aggContextScript(vars, state -> { - HashMap stateMap = (HashMap) state; - List items = (List) stateMap.get("items"); - items.add(1); - })); - - scripts.put("sum context state values", 
vars -> { - int sum = 0; - HashMap state = (HashMap) vars.get("state"); - List items = (List) state.get("items"); - - for (Object x : items) { - sum += (Integer)x; - } - - return sum; - }); - - scripts.put("sum context states", vars -> { - Integer sum = 0; - - List states = (List) vars.get("states"); - for (Object state : states) { - sum += ((Number) state).intValue(); - } - - return sum; - }); - return scripts; } - static Object aggScript(Map vars, Consumer fn) { - return aggScript(vars, fn, "_agg"); - } - - static Object aggContextScript(Map vars, Consumer fn) { - return aggScript(vars, fn, "state"); - } - @SuppressWarnings("unchecked") - private static Object aggScript(Map vars, Consumer fn, String stateVarName) { - T aggState = (T) vars.get(stateVarName); + static Map aggScript(Map vars, Consumer> fn) { + Map aggState = (Map) vars.get("state"); fn.accept(aggState); return aggState; } @@ -285,17 +277,17 @@ public class ScriptedMetricIT extends ESIntegTestCase { assertAcked(client().admin().cluster().preparePutStoredScript() .setId("mapScript_stored") .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + - " \"source\": \"_agg.add(vars.multiplier)\"} }"), XContentType.JSON)); + " \"source\": \"state.list.add(vars.multiplier)\"} }"), XContentType.JSON)); assertAcked(client().admin().cluster().preparePutStoredScript() .setId("combineScript_stored") .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + - " \"source\": \"sum agg values as a new aggregation\"} }"), XContentType.JSON)); + " \"source\": \"sum state values as a new aggregation\"} }"), XContentType.JSON)); assertAcked(client().admin().cluster().preparePutStoredScript() .setId("reduceScript_stored") .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + - " \"source\": \"sum aggs of agg values as a new aggregation\"} }"), XContentType.JSON)); + " \"source\": \"sum all states (lists) values as a new aggregation\"} }"), XContentType.JSON)); indexRandom(true, builders); ensureSearchable(); @@ -315,9 +307,10 @@ public class ScriptedMetricIT extends ESIntegTestCase { // the name of the file script is used in test method while the source of the file script // must match a predefined script from CustomScriptPlugin.pluginScripts() method Files.write(scripts.resolve("init_script.mockscript"), "vars.multiplier = 3".getBytes("UTF-8")); - Files.write(scripts.resolve("map_script.mockscript"), "_agg.add(vars.multiplier)".getBytes("UTF-8")); - Files.write(scripts.resolve("combine_script.mockscript"), "sum agg values as a new aggregation".getBytes("UTF-8")); - Files.write(scripts.resolve("reduce_script.mockscript"), "sum aggs of agg values as a new aggregation".getBytes("UTF-8")); + Files.write(scripts.resolve("map_script.mockscript"), "state.list.add(vars.multiplier)".getBytes("UTF-8")); + Files.write(scripts.resolve("combine_script.mockscript"), "sum state values as a new aggregation".getBytes("UTF-8")); + Files.write(scripts.resolve("reduce_script.mockscript"), + "sum all states (lists) values as a new aggregation".getBytes("UTF-8")); } catch (IOException e) { throw new RuntimeException("failed to create scripts"); } @@ -329,7 +322,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { } public void testMap() { - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg['count'] = 1", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", 
Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -365,52 +358,12 @@ public class ScriptedMetricIT extends ESIntegTestCase { assertThat(numShardsRun, greaterThan(0)); } - public void testExplicitAggParam() { - Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); - - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(1)", Collections.emptyMap()); - - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(scriptedMetric("scripted").params(params).mapScript(mapScript)) - .get(); - assertSearchResponse(response); - assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); - - Aggregation aggregation = response.getAggregations().get("scripted"); - assertThat(aggregation, notNullValue()); - assertThat(aggregation, instanceOf(ScriptedMetric.class)); - ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation; - assertThat(scriptedMetricAggregation.getName(), equalTo("scripted")); - assertThat(scriptedMetricAggregation.aggregation(), notNullValue()); - assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class)); - List aggregationList = (List) scriptedMetricAggregation.aggregation(); - assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries)); - long totalCount = 0; - for (Object object : aggregationList) { - assertThat(object, notNullValue()); - assertThat(object, instanceOf(List.class)); - List list = (List) object; - for (Object o : list) { - assertThat(o, notNullValue()); - assertThat(o, instanceOf(Number.class)); - Number numberValue = (Number) o; - assertThat(numberValue, equalTo((Number) 1)); - totalCount += numberValue.longValue(); - } - } - assertThat(totalCount, equalTo(numDocs)); - } - - public void testMapWithParamsAndImplicitAggMap() { + public void testMapWithParams() { // Split the params up between the script and the aggregation. - // Don't put any _agg map in params. 
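The comment removals here, and the repeated deletion of `params.put("_agg", new ArrayList<>())` through the rest of this file, reflect the underlying change: map scripts no longer read a caller-seeded `_agg` object, they receive an always-present `state` map, and the mock scripts earlier in this class lazily create `state.list` themselves. A plain-Java sketch of that pattern, with `vars` standing in for the script's variable bindings (the names follow the mocks above, not a public API):

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class StateListDemo {
    @SuppressWarnings("unchecked")
    static Map<String, Object> mapScript(Map<String, Object> vars) {
        Map<String, Object> state = (Map<String, Object>) vars.get("state");
        // Lazily create the list so the script also works when no init script ran.
        if (state.containsKey("list") == false) {
            state.put("list", new ArrayList<Object>());
        }
        ((List<Object>) state.get("list")).add(1);
        return state;
    }

    public static void main(String[] args) {
        Map<String, Object> vars = new HashMap<>();
        vars.put("state", new HashMap<String, Object>()); // provided by the framework, never by params
        mapScript(vars);
        mapScript(vars);
        System.out.println(vars.get("state")); // {list=[1, 1]}
    }
}
```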
Map scriptParams = Collections.singletonMap("param1", "12"); Map aggregationParams = Collections.singletonMap("param2", 1); - // The _agg hashmap will be available even if not declared in the params map - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg[param1] = param2", scriptParams); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state[param1] = param2", scriptParams); SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -454,7 +407,6 @@ public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); SearchResponse response = client() @@ -466,7 +418,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { .initScript( new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap())) .mapScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "_agg.add(vars.multiplier)", Collections.emptyMap()))) + "state.list.add(vars.multiplier)", Collections.emptyMap()))) .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -483,8 +435,11 @@ public class ScriptedMetricIT extends ESIntegTestCase { long totalCount = 0; for (Object object : aggregationList) { assertThat(object, notNullValue()); - assertThat(object, instanceOf(List.class)); - List list = (List) object; + assertThat(object, instanceOf(HashMap.class)); + Map map = (Map) object; + assertThat(map, hasKey("list")); + assertThat(map.get("list"), instanceOf(List.class)); + List list = (List) map.get("list"); for (Object o : list) { assertThat(o, notNullValue()); assertThat(o, instanceOf(Number.class)); @@ -501,12 +456,11 @@ public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(1)", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", Collections.emptyMap()); Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -553,13 +507,13 @@ public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", + Collections.emptyMap()); Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -607,15 +561,15 @@ public class ScriptedMetricIT extends 
ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", + Collections.emptyMap()); Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); - Script reduceScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -652,15 +606,15 @@ public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", + Collections.emptyMap()); Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); - Script reduceScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", Collections.emptyMap()); SearchResponse searchResponse = client() .prepareSearch("idx") @@ -707,14 +661,14 @@ public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", + Collections.emptyMap()); Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); - Script reduceScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -749,13 +703,13 @@ 
public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); - Script reduceScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", + Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "sum all states' state.list values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -789,12 +743,12 @@ public class ScriptedMetricIT extends ESIntegTestCase { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); - Script reduceScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", + Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "sum all states' state.list values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -828,18 +782,18 @@ public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); Map reduceParams = new HashMap<>(); reduceParams.put("multiplier", 4); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", + Collections.emptyMap()); Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); - Script reduceScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "multiplied sum aggs of agg values as a new aggregation", reduceParams); + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "multiplied sum all states (lists) values as a new aggregation", reduceParams); SearchResponse response = client() .prepareSearch("idx") @@ -875,7 +829,6 @@ public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); SearchResponse response = client() @@ -916,15 +869,15 @@ public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new 
ArrayList<>()); params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", + Collections.emptyMap()); Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); - Script reduceScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -977,15 +930,15 @@ public class ScriptedMetricIT extends ESIntegTestCase { varsMap.put("multiplier", 1); Map params = new HashMap<>(); - params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", + Collections.emptyMap()); Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); - Script reduceScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", Collections.emptyMap()); SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) @@ -1021,7 +974,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { * not using a script does get cached. 
*/ public void testDontCacheScripts() throws Exception { - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg['count'] = 1", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", Collections.emptyMap()); assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get()); @@ -1047,7 +1000,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { public void testConflictingAggAndScriptParams() { Map params = Collections.singletonMap("param1", "12"); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(1)", params); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", params); SearchRequestBuilder builder = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -1056,37 +1009,4 @@ public class ScriptedMetricIT extends ESIntegTestCase { SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get); assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both aggregation and script parameters")); } - - public void testAggFromContext() { - Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items = new ArrayList()", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items.add(1)", Collections.emptyMap()); - Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context state values", Collections.emptyMap()); - Script reduceScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context states", - Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .initScript(initScript) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)) - .get(); - - Aggregation aggregation = response.getAggregations().get("scripted"); - assertThat(aggregation, notNullValue()); - assertThat(aggregation, instanceOf(ScriptedMetric.class)); - - ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation; - assertThat(scriptedMetricAggregation.getName(), equalTo("scripted")); - assertThat(scriptedMetricAggregation.aggregation(), notNullValue()); - - assertThat(scriptedMetricAggregation.aggregation(), instanceOf(Integer.class)); - Integer aggResult = (Integer) scriptedMetricAggregation.aggregation(); - long totalAgg = aggResult.longValue(); - assertThat(totalAgg, equalTo(numDocs)); - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricAggStateV6CompatTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricAggStateV6CompatTests.java new file mode 100644 index 00000000000..4abf68a960b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricAggStateV6CompatTests.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.metrics.scripted; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptedMetricAggContexts; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.script.ScriptModule; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.aggregations.Aggregation.CommonFields; +import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.test.InternalAggregationTestCase; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.sameInstance; + +/** + * This test verifies that the _aggs param is added correctly when the system property + * "es.aggregations.enable_scripted_metric_agg_param" is set to true. + */ +public class InternalScriptedMetricAggStateV6CompatTests extends InternalAggregationTestCase { + + private static final String REDUCE_SCRIPT_NAME = "reduceScript"; + + @Override + protected InternalScriptedMetric createTestInstance(String name, List pipelineAggregators, + Map metaData) { + Script reduceScript = new Script(ScriptType.INLINE, MockScriptEngine.NAME, REDUCE_SCRIPT_NAME, Collections.emptyMap()); + return new InternalScriptedMetric(name, "agg value", reduceScript, pipelineAggregators, metaData); + } + + /** + * Mock of the script service. The script that is run looks at the + * "_aggs" parameter to verify that it was put in place by InternalScriptedMetric. 
+ */ + @Override + protected ScriptService mockScriptService() { + Function, Object> script = params -> { + Object aggs = params.get("_aggs"); + Object states = params.get("states"); + assertThat(aggs, instanceOf(List.class)); + assertThat(aggs, sameInstance(states)); + return aggs; + }; + + @SuppressWarnings("unchecked") + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, + Collections.singletonMap(REDUCE_SCRIPT_NAME, script)); + Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); + return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); + } + + @Override + protected void assertReduced(InternalScriptedMetric reduced, List inputs) { + assertWarnings(ScriptedMetricAggContexts.AGG_PARAM_DEPRECATION_WARNING); + } + + @Override + protected Reader instanceReader() { + return InternalScriptedMetric::new; + } + + @Override + protected void assertFromXContent(InternalScriptedMetric aggregation, ParsedAggregation parsedAggregation) {} + + @Override + protected Predicate excludePathsFromXContentInsertion() { + return path -> path.contains(CommonFields.VALUE.getPreferredName()); + } + + @Override + protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance) { + String name = instance.getName(); + Object value = instance.aggregation(); + Script reduceScript = instance.reduceScript; + List pipelineAggregators = instance.pipelineAggregators(); + Map metaData = instance.getMetaData(); + return new InternalScriptedMetric(name + randomAlphaOfLength(5), value, reduceScript, pipelineAggregators, + metaData); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java index 584208af417..70ddacf5698 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java @@ -107,7 +107,7 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase ((List) script.get("_aggs")).size())); + Collections.singletonMap(REDUCE_SCRIPT_NAME, script -> ((List) script.get("states")).size())); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorAggStateV6CompatTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorAggStateV6CompatTests.java new file mode 100644 index 00000000000..bf78cae711b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorAggStateV6CompatTests.java @@ -0,0 +1,180 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.metrics.scripted; + +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.store.Directory; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptedMetricAggContexts; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.script.ScriptModule; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; + +import static java.util.Collections.singleton; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; + +/** + * This test verifies that the _agg param is added correctly when the system property + * "es.aggregations.enable_scripted_metric_agg_param" is set to true. + */ +public class ScriptedMetricAggregatorAggStateV6CompatTests extends AggregatorTestCase { + + private static final String AGG_NAME = "scriptedMetric"; + private static final Script INIT_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScript", Collections.emptyMap()); + private static final Script MAP_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScript", Collections.emptyMap()); + private static final Script COMBINE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScript", + Collections.emptyMap()); + + private static final Script INIT_SCRIPT_EXPLICIT_AGG = new Script(ScriptType.INLINE, MockScriptEngine.NAME, + "initScriptExplicitAgg", Collections.emptyMap()); + private static final Script MAP_SCRIPT_EXPLICIT_AGG = new Script(ScriptType.INLINE, MockScriptEngine.NAME, + "mapScriptExplicitAgg", Collections.emptyMap()); + private static final Script COMBINE_SCRIPT_EXPLICIT_AGG = new Script(ScriptType.INLINE, MockScriptEngine.NAME, + "combineScriptExplicitAgg", Collections.emptyMap()); + private static final String EXPLICIT_AGG_OBJECT = "Explicit agg object"; + + private static final Map, Object>> SCRIPTS = new HashMap<>(); + + @BeforeClass + @SuppressWarnings("unchecked") + public static void initMockScripts() { + // If _agg is provided implicitly, it should be the same object as "state" from the context.
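The comment above states the contract these compat scripts pin down. As a hypothetical sketch of the aliasing being verified (the binding function and its name are illustrative, not the actual ScriptedMetricAggContexts code): with the legacy system property enabled, `_agg` is bound to the very same map instance as `state`, alongside a deprecation warning, so mutations are visible under both names.

```java
import java.util.HashMap;
import java.util.Map;

public class AggParamAliasDemo {
    // Illustrative only: sketches the binding the sameInstance(...) assertions below check.
    static Map<String, Object> bindScriptParams(boolean legacyAggParamEnabled) {
        Map<String, Object> params = new HashMap<>();
        Map<String, Object> state = new HashMap<>();
        params.put("state", state);
        if (legacyAggParamEnabled) {
            params.put("_agg", state); // an alias, not a copy
            // the real code would also record the _agg/_aggs deprecation warning here
        }
        return params;
    }

    public static void main(String[] args) {
        Map<String, Object> params = bindScriptParams(true);
        System.out.println(params.get("_agg") == params.get("state")); // true
    }
}
```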
+ SCRIPTS.put("initScript", params -> { + Object agg = params.get("_agg"); + Object state = params.get("state"); + assertThat(agg, instanceOf(Map.class)); + assertThat(agg, sameInstance(state)); + return agg; + }); + SCRIPTS.put("mapScript", params -> { + Object agg = params.get("_agg"); + Object state = params.get("state"); + assertThat(agg, instanceOf(Map.class)); + assertThat(agg, sameInstance(state)); + return agg; + }); + SCRIPTS.put("combineScript", params -> { + Object agg = params.get("_agg"); + Object state = params.get("state"); + assertThat(agg, instanceOf(Map.class)); + assertThat(agg, sameInstance(state)); + return agg; + }); + + SCRIPTS.put("initScriptExplicitAgg", params -> { + Object agg = params.get("_agg"); + assertThat(agg, equalTo(EXPLICIT_AGG_OBJECT)); + return agg; + }); + SCRIPTS.put("mapScriptExplicitAgg", params -> { + Object agg = params.get("_agg"); + assertThat(agg, equalTo(EXPLICIT_AGG_OBJECT)); + return agg; + }); + SCRIPTS.put("combineScriptExplicitAgg", params -> { + Object agg = params.get("_agg"); + assertThat(agg, equalTo(EXPLICIT_AGG_OBJECT)); + return agg; + }); + } + + /** + * Test that the _agg param is implicitly added + */ + public void testWithImplicitAggParam() throws IOException { + try (Directory directory = newDirectory()) { + Integer numDocs = 10; + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + for (int i = 0; i < numDocs; i++) { + indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i))); + } + } + try (IndexReader indexReader = DirectoryReader.open(directory)) { + ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); + aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT); + search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder); + } + } + + assertWarnings(ScriptedMetricAggContexts.AGG_PARAM_DEPRECATION_WARNING); + } + + /** + * Test that an explicitly added _agg param is honored + */ + public void testWithExplicitAggParam() throws IOException { + try (Directory directory = newDirectory()) { + Integer numDocs = 10; + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + for (int i = 0; i < numDocs; i++) { + indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i))); + } + } + + Map aggParams = new HashMap<>(); + aggParams.put("_agg", EXPLICIT_AGG_OBJECT); + + try (IndexReader indexReader = DirectoryReader.open(directory)) { + ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); + aggregationBuilder + .params(aggParams) + .initScript(INIT_SCRIPT_EXPLICIT_AGG) + .mapScript(MAP_SCRIPT_EXPLICIT_AGG) + .combineScript(COMBINE_SCRIPT_EXPLICIT_AGG); + search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder); + } + } + + assertWarnings(ScriptedMetricAggContexts.AGG_PARAM_DEPRECATION_WARNING); + } + + /** + * We cannot use Mockito for mocking QueryShardContext in this case because + * script-related methods (e.g. 
QueryShardContext#getLazyExecutableScript) + * are final and cannot be mocked + */ + @Override + protected QueryShardContext queryShardContextMock(MapperService mapperService) { + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, SCRIPTS); + Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); + ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); + return new QueryShardContext(0, mapperService.getIndexSettings(), null, null, mapperService, null, scriptService, + xContentRegistry(), writableRegistry(), null, null, System::currentTimeMillis, null); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java index 5124503fc03..65e42556461 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java @@ -83,72 +83,72 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase { @SuppressWarnings("unchecked") public static void initMockScripts() { SCRIPTS.put("initScript", params -> { - Map agg = (Map) params.get("_agg"); - agg.put("collector", new ArrayList()); - return agg; - }); + Map state = (Map) params.get("state"); + state.put("collector", new ArrayList()); + return state; + }); SCRIPTS.put("mapScript", params -> { - Map agg = (Map) params.get("_agg"); - ((List) agg.get("collector")).add(1); // just add 1 for each doc the script is run on - return agg; + Map state = (Map) params.get("state"); + ((List) state.get("collector")).add(1); // just add 1 for each doc the script is run on + return state; }); SCRIPTS.put("combineScript", params -> { - Map agg = (Map) params.get("_agg"); - return ((List) agg.get("collector")).stream().mapToInt(Integer::intValue).sum(); + Map state = (Map) params.get("state"); + return ((List) state.get("collector")).stream().mapToInt(Integer::intValue).sum(); }); SCRIPTS.put("initScriptScore", params -> { - Map agg = (Map) params.get("_agg"); - agg.put("collector", new ArrayList()); - return agg; - }); + Map state = (Map) params.get("state"); + state.put("collector", new ArrayList()); + return state; + }); SCRIPTS.put("mapScriptScore", params -> { - Map agg = (Map) params.get("_agg"); - ((List) agg.get("collector")).add(((Number) params.get("_score")).doubleValue()); - return agg; + Map state = (Map) params.get("state"); + ((List) state.get("collector")).add(((Number) params.get("_score")).doubleValue()); + return state; }); SCRIPTS.put("combineScriptScore", params -> { - Map agg = (Map) params.get("_agg"); - return ((List) agg.get("collector")).stream().mapToDouble(Double::doubleValue).sum(); + Map state = (Map) params.get("state"); + return ((List) state.get("collector")).stream().mapToDouble(Double::doubleValue).sum(); }); SCRIPTS.put("initScriptParams", params -> { - Map agg = (Map) params.get("_agg"); + Map state = (Map) params.get("state"); Integer initialValue = (Integer)params.get("initialValue"); ArrayList collector = new ArrayList<>(); collector.add(initialValue); - agg.put("collector", collector); - return agg; + state.put("collector", collector); + return state; }); SCRIPTS.put("mapScriptParams", params -> { - Map agg = (Map) params.get("_agg"); + Map state = (Map) params.get("state");
Integer itemValue = (Integer) params.get("itemValue"); - ((List) agg.get("collector")).add(itemValue); - return agg; + ((List) state.get("collector")).add(itemValue); + return state; }); SCRIPTS.put("combineScriptParams", params -> { - Map agg = (Map) params.get("_agg"); + Map state = (Map) params.get("state"); int divisor = ((Integer) params.get("divisor")); - return ((List) agg.get("collector")).stream().mapToInt(Integer::intValue).map(i -> i / divisor).sum(); + return ((List) state.get("collector")).stream().mapToInt(Integer::intValue).map(i -> i / divisor).sum(); }); SCRIPTS.put("initScriptSelfRef", params -> { - Map agg = (Map) params.get("_agg"); - agg.put("collector", new ArrayList()); - agg.put("selfRef", agg); - return agg; + Map state = (Map) params.get("state"); + state.put("collector", new ArrayList()); + state.put("selfRef", state); + return state; }); SCRIPTS.put("mapScriptSelfRef", params -> { - Map agg = (Map) params.get("_agg"); - agg.put("selfRef", agg); - return agg; + Map state = (Map) params.get("state"); + state.put("selfRef", state); + return state; }); SCRIPTS.put("combineScriptSelfRef", params -> { - Map agg = (Map) params.get("_agg"); - agg.put("selfRef", agg); - return agg; + Map state = (Map) params.get("state"); + state.put("selfRef", state); + return state; }); } @@ -170,7 +170,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase { } /** - * without combine script, the "_aggs" map should contain a list of the size of the number of documents matched + * without combine script, the "states" map should contain a list of the size of the number of documents matched */ public void testScriptedMetricWithoutCombine() throws IOException { try (Directory directory = newDirectory()) { diff --git a/server/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java index 31fcfa7155c..4005f1218a9 100644 --- a/server/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.scroll; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; @@ -198,6 +199,8 @@ public class DuelScrollIT extends ESIntegTestCase { } // no replicas, as they might be ordered differently settings.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0); + // we need to control refreshes as they might take different merges into account + settings.put("index.refresh_interval", -1); assertAcked(prepareCreate("test").setSettings(settings.build()).get()); final int numDocs = randomIntBetween(10, 200); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index d95db778a6a..44c49ace5de 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -493,15 +493,24 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } public void testGeoField() throws Exception { -// Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); 
-// Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder mapping = jsonBuilder(); mapping.startObject(); mapping.startObject(TYPE); mapping.startObject("properties"); + mapping.startObject("location"); + mapping.startObject("properties"); mapping.startObject("pin"); mapping.field("type", "geo_point"); + // Enable store and disable indexing sometimes + if (randomBoolean()) { + mapping.field("store", "true"); + } + if (randomBoolean()) { + mapping.field("index", "false"); + } + mapping.endObject(); // pin mapping.endObject(); + mapping.endObject(); // location mapping.startObject(FIELD); mapping.field("type", "completion"); mapping.field("analyzer", "simple"); @@ -510,7 +519,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { mapping.startObject(); mapping.field("name", "st"); mapping.field("type", "geo"); - mapping.field("path", "pin"); + mapping.field("path", "location.pin"); mapping.field("precision", 5); mapping.endObject(); mapping.endArray(); @@ -524,7 +533,9 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { XContentBuilder source1 = jsonBuilder() .startObject() + .startObject("location") .latlon("pin", 52.529172, 13.407333) + .endObject() .startObject(FIELD) .array("input", "Hotel Amsterdam in Berlin") .endObject() @@ -533,7 +544,9 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { XContentBuilder source2 = jsonBuilder() .startObject() + .startObject("location") .latlon("pin", 52.363389, 4.888695) + .endObject() .startObject(FIELD) .array("input", "Hotel Berlin in Amsterdam") .endObject() @@ -600,6 +613,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { private void createIndexAndMapping(CompletionMappingBuilder completionMappingBuilder) throws IOException { createIndexAndMappingAndSettings(Settings.EMPTY, completionMappingBuilder); } + private void createIndexAndMappingAndSettings(Settings settings, CompletionMappingBuilder completionMappingBuilder) throws IOException { XContentBuilder mapping = jsonBuilder().startObject() .startObject(TYPE).startObject("properties") diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index 56ff157ec71..a745384eb3e 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -200,6 +201,70 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { assertContextSuggestFields(fields, 3); } + public void testMalformedGeoField() throws Exception { + XContentBuilder mapping = jsonBuilder(); + mapping.startObject(); + mapping.startObject("type1"); + mapping.startObject("properties"); + mapping.startObject("pin"); + String type = randomFrom("text", "keyword", "long"); + mapping.field("type", type); + mapping.endObject(); + mapping.startObject("suggestion"); + mapping.field("type", "completion"); + mapping.field("analyzer", "simple"); + + 
mapping.startArray("contexts"); + mapping.startObject(); + mapping.field("name", "st"); + mapping.field("type", "geo"); + mapping.field("path", "pin"); + mapping.field("precision", 5); + mapping.endObject(); + mapping.endArray(); + + mapping.endObject(); + + mapping.endObject(); + mapping.endObject(); + mapping.endObject(); + + ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class, + () -> createIndex("test", Settings.EMPTY, "type1", mapping)); + + assertThat(ex.getMessage(), equalTo("field [pin] referenced in context [st] must be mapped to geo_point, found [" + type + "]")); + } + + public void testMissingGeoField() throws Exception { + XContentBuilder mapping = jsonBuilder(); + mapping.startObject(); + mapping.startObject("type1"); + mapping.startObject("properties"); + mapping.startObject("suggestion"); + mapping.field("type", "completion"); + mapping.field("analyzer", "simple"); + + mapping.startArray("contexts"); + mapping.startObject(); + mapping.field("name", "st"); + mapping.field("type", "geo"); + mapping.field("path", "pin"); + mapping.field("precision", 5); + mapping.endObject(); + mapping.endArray(); + + mapping.endObject(); + + mapping.endObject(); + mapping.endObject(); + mapping.endObject(); + + ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class, + () -> createIndex("test", Settings.EMPTY, "type1", mapping)); + + assertThat(ex.getMessage(), equalTo("field [pin] referenced in context [st] is not defined in the mapping")); + } + public void testParsingQueryContextBasic() throws Exception { XContentBuilder builder = jsonBuilder().value("ezs42e44yx96"); XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 8da4064d1c8..3d0388ccfad 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.transport; +import java.util.function.Supplier; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; @@ -158,8 +159,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { - updateSeedNodes(connection, Arrays.asList(seedNode)); + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(() -> seedNode)); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -198,8 +199,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { - updateSeedNodes(connection, Arrays.asList(seedNode)); + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(() -> seedNode)); 
assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -254,8 +255,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { - updateSeedNodes(connection, Arrays.asList(seedNode)); + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(() -> seedNode)); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -276,7 +277,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(discoverableTransport.getLocalDiscoNode()); knownNodes.add(incompatibleTransport.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List<DiscoveryNode> seedNodes = Arrays.asList(incompatibleSeedNode, seedNode); + List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> incompatibleSeedNode, () -> seedNode); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -310,8 +311,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { - updateSeedNodes(connection, Arrays.asList(seedNode)); + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(() -> seedNode)); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); assertFalse(service.nodeConnected(spareNode)); @@ -359,8 +360,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false)) { - updateSeedNodes(connection, Arrays.asList(seedNode)); + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false)) { + updateSeedNodes(connection, Arrays.asList(() -> seedNode)); if (rejectedNode.equals(seedNode)) { assertFalse(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); @@ -374,7 +375,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { } } - private void updateSeedNodes(RemoteClusterConnection connection, List<DiscoveryNode> seedNodes) throws Exception { + private void updateSeedNodes(RemoteClusterConnection connection, List<Supplier<DiscoveryNode>> seedNodes) throws Exception { CountDownLatch latch = new CountDownLatch(1); AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>(); ActionListener<Void> listener = ActionListener.wrap(x -> latch.countDown(), x -> { @@ -398,8 +399,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { - expectThrows(Exception.class, () -> updateSeedNodes(connection,
Arrays.asList(seedNode))); + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + expectThrows(Exception.class, () -> updateSeedNodes(connection, Arrays.asList(() -> seedNode))); assertFalse(service.nodeConnected(seedNode)); assertTrue(connection.assertNoRunningConnections()); } @@ -461,7 +462,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { connection.addConnectedNode(seedNode); for (DiscoveryNode node : knownNodes) { final Transport.Connection transportConnection = connection.getConnection(node); @@ -504,7 +505,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { CountDownLatch listenerCalled = new CountDownLatch(1); AtomicReference<Exception> exceptionReference = new AtomicReference<>(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { ActionListener<Void> listener = ActionListener.wrap(x -> { listenerCalled.countDown(); fail("expected exception"); }, x -> { exceptionReference.set(x); listenerCalled.countDown(); }); - connection.updateSeedNodes(Arrays.asList(seedNode), listener); + connection.updateSeedNodes(Arrays.asList(() -> seedNode), listener); acceptedLatch.await(); connection.close(); // now close it, this should trigger an interrupt on the socket and we can move on assertTrue(connection.assertNoRunningConnections()); @@ -539,7 +540,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { service.start(); service.acceptIncomingRequests(); - List<DiscoveryNode> nodes = Collections.singletonList(seedNode); + List<Supplier<DiscoveryNode>> nodes = Collections.singletonList(() -> seedNode); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", nodes, service, Integer.MAX_VALUE, n -> true)) { if (randomBoolean()) { @@ -579,7 +580,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { service.start(); service.acceptIncomingRequests(); - List<DiscoveryNode> nodes = Collections.singletonList(seedNode); + List<Supplier<DiscoveryNode>> nodes = Collections.singletonList(() -> seedNode); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", nodes, service, Integer.MAX_VALUE, n -> true)) { SearchRequest request = new SearchRequest("test-index"); @@ -635,7 +636,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Collections.singletonList(seedNode), service, Integer.MAX_VALUE, n -> true)) { + Collections.singletonList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { SearchRequest request = new SearchRequest("test-index"); ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index") @@ -738,7 +739,7
@@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(discoverableTransport.getLocalDiscoNode()); knownNodes.add(seedTransport1.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List<DiscoveryNode> seedNodes = Arrays.asList(seedNode1, seedNode); + List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -816,7 +817,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(discoverableTransport.getLocalDiscoNode()); knownNodes.add(seedTransport1.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List<DiscoveryNode> seedNodes = Arrays.asList(seedNode1, seedNode); + List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -904,7 +905,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(transport3.getLocalDiscoNode()); knownNodes.add(transport2.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List<DiscoveryNode> seedNodes = Arrays.asList(node3, node1, node2); + List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> node3, () -> node1, () -> node2); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -1059,7 +1060,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { assertFalse(service.nodeConnected(seedNode)); assertFalse(service.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -1108,9 +1109,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { if (randomBoolean()) { - updateSeedNodes(connection, Arrays.asList(seedNode)); + updateSeedNodes(connection, Arrays.asList(() -> seedNode)); } CountDownLatch responseLatch = new CountDownLatch(1); AtomicReference<Function<String, DiscoveryNode>> reference = new AtomicReference<>(); @@ -1142,14 +1143,14 @@ public class RemoteClusterConnectionTests extends ESTestCase { List<MockTransportService> discoverableTransports = new CopyOnWriteArrayList<>(); try { final int numDiscoverableNodes = randomIntBetween(5, 20); - List<DiscoveryNode> discoverableNodes = new ArrayList<>(numDiscoverableNodes); - for (int i = 0; i < numDiscoverableNodes; i++) { + List<Supplier<DiscoveryNode>> discoverableNodes = new ArrayList<>(numDiscoverableNodes); + for (int i = 0; i < numDiscoverableNodes; i++ ) { MockTransportService transportService = startTransport("discoverable_node" + i, knownNodes, Version.CURRENT); - discoverableNodes.add(transportService.getLocalDiscoNode()); + discoverableNodes.add(transportService::getLocalDiscoNode); discoverableTransports.add(transportService); } - List<DiscoveryNode> seedNodes = randomSubsetOf(discoverableNodes); + List<Supplier<DiscoveryNode>> seedNodes =
randomSubsetOf(discoverableNodes); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -1198,7 +1199,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { discoverableTransports.add(transportService); connection.addConnectedNode(transportService.getLocalDiscoNode()); } else { - DiscoveryNode node = randomFrom(discoverableNodes); + DiscoveryNode node = randomFrom(discoverableNodes).get(); connection.onNodeDisconnected(node); } } @@ -1246,12 +1247,13 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { - updateSeedNodes(connection, Arrays.asList(seedNode)); + Arrays.asList( () -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(() -> seedNode)); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); - List<DiscoveryNode> discoveryNodes = Arrays.asList(otherClusterTransport.getLocalDiscoNode(), seedNode); + List<Supplier<DiscoveryNode>> discoveryNodes = + Arrays.asList(() -> otherClusterTransport.getLocalDiscoNode(), () -> seedNode); Collections.shuffle(discoveryNodes, random()); updateSeedNodes(connection, discoveryNodes); assertTrue(service.nodeConnected(seedNode)); @@ -1262,7 +1264,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { assertTrue(service.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () -> - updateSeedNodes(connection, Arrays.asList(otherClusterTransport.getLocalDiscoNode()))); + updateSeedNodes(connection, Arrays.asList(() -> otherClusterTransport.getLocalDiscoNode()))); assertThat(illegalStateException.getMessage(), startsWith("handshake failed, mismatched cluster name [Cluster [otherCluster]]" + " - {other_cluster_discoverable_node}")); @@ -1325,7 +1327,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Collections.singletonList(connectedNode), service, Integer.MAX_VALUE, n -> true)) { + Collections.singletonList(() -> connectedNode), service, Integer.MAX_VALUE, n -> true)) { connection.addConnectedNode(connectedNode); for (int i = 0; i < 10; i++) { //always a direct connection as the remote node is already connected @@ -1348,4 +1350,34 @@ public class RemoteClusterConnectionTests extends ESTestCase { } } } + + public void testLazyResolveTransportAddress() throws Exception { + List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>(); + try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); + MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) { + DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + knownNodes.add(seedTransport.getLocalDiscoNode()); + knownNodes.add(discoverableTransport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { +
service.start(); + service.acceptIncomingRequests(); + CountDownLatch multipleResolveLatch = new CountDownLatch(2); + Supplier<DiscoveryNode> seedSupplier = () -> { + multipleResolveLatch.countDown(); + return seedNode; + }; + try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", + Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(seedSupplier)); + // Closing connections leads to RemoteClusterConnection.ConnectHandler.collectRemoteNodes + // being called again so we try to resolve the same seed node's host twice + discoverableTransport.close(); + seedTransport.close(); + assertTrue(multipleResolveLatch.await(30L, TimeUnit.SECONDS)); + } + } + } + } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 03d76b5a953..c94b1cbdef5 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.transport; +import java.util.function.Supplier; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; @@ -103,10 +104,19 @@ public class RemoteClusterServiceTests extends ESTestCase { .put("search.remote.foo.seeds", "192.168.0.1").build(); expectThrows(IllegalArgumentException.class, () -> RemoteClusterAware.REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(brokenSettings).forEach(setting -> setting.get(brokenSettings))); + + Settings brokenPortSettings = Settings.builder() + .put("search.remote.foo.seeds", "192.168.0.1:123456789123456789").build(); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> RemoteClusterAware.REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(brokenSettings) + .forEach(setting -> setting.get(brokenPortSettings)) + ); + assertEquals("failed to parse port", e.getMessage()); } public void testBuiltRemoteClustersSeeds() throws Exception { - Map<String, List<DiscoveryNode>> map = RemoteClusterService.buildRemoteClustersSeeds( + Map<String, List<Supplier<DiscoveryNode>>> map = RemoteClusterService.buildRemoteClustersSeeds( Settings.builder().put("search.remote.foo.seeds", "192.168.0.1:8080").put("search.remote.bar.seeds", "[::1]:9090").build()); assertEquals(2, map.size()); assertTrue(map.containsKey("foo")); @@ -114,13 +124,13 @@ public class RemoteClusterServiceTests extends ESTestCase { assertEquals(1, map.get("foo").size()); assertEquals(1, map.get("bar").size()); - DiscoveryNode foo = map.get("foo").get(0); + DiscoveryNode foo = map.get("foo").get(0).get(); assertEquals(foo.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("192.168.0.1"), 8080))); assertEquals(foo.getId(), "foo#192.168.0.1:8080"); assertEquals(foo.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); - DiscoveryNode bar = map.get("bar").get(0); + DiscoveryNode bar = map.get("bar").get(0).get(); assertEquals(bar.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("[::1]"), 9090))); assertEquals(bar.getId(), "bar#[::1]:9090"); assertEquals(bar.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); @@ -194,10 +204,10 @@ public class RemoteClusterServiceTests extends ESTestCase { assertFalse(service.isCrossClusterSearchEnabled()); service.initializeRemoteClusters(); assertFalse(service.isCrossClusterSearchEnabled()); -
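Throughout these tests the seed lists change from concrete `DiscoveryNode` instances to `Supplier<DiscoveryNode>`, so an address is resolved each time a connection attempt needs it rather than once at configuration time; `testLazyResolveTransportAddress` above counts exactly that. A small Java sketch of the idea under simplified, assumed types (plain strings standing in for resolved addresses):

[source,java]
--------------------------------------------------
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;

public class LazySeedSketch {
    public static void main(String[] args) {
        AtomicInteger resolutions = new AtomicInteger();
        // Each connection attempt invokes the supplier again, so a host whose
        // DNS entry changed between attempts is re-resolved instead of being
        // pinned to the address seen at startup.
        Supplier<String> seed = () -> {
            resolutions.incrementAndGet();
            return "192.168.0.1:9300"; // stand-in for actual address resolution
        };
        List<Supplier<String>> seeds = Arrays.asList(seed);

        for (int attempt = 0; attempt < 2; attempt++) {
            System.out.println("attempt " + attempt + " -> " + seeds.get(0).get());
        }
        System.out.println("resolved " + resolutions.get() + " times"); // 2
    }
}
--------------------------------------------------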
service.updateRemoteCluster("cluster_1", Collections.singletonList(seedNode.getAddress().address())); + service.updateRemoteCluster("cluster_1", Collections.singletonList(seedNode.getAddress().toString())); assertTrue(service.isCrossClusterSearchEnabled()); assertTrue(service.isRemoteClusterRegistered("cluster_1")); - service.updateRemoteCluster("cluster_2", Collections.singletonList(otherSeedNode.getAddress().address())); + service.updateRemoteCluster("cluster_2", Collections.singletonList(otherSeedNode.getAddress().toString())); assertTrue(service.isCrossClusterSearchEnabled()); assertTrue(service.isRemoteClusterRegistered("cluster_1")); assertTrue(service.isRemoteClusterRegistered("cluster_2")); @@ -252,22 +262,17 @@ public class RemoteClusterServiceTests extends ESTestCase { service.initializeRemoteClusters(); assertFalse(service.isCrossClusterSearchEnabled()); - final InetSocketAddress c1N1Address = c1N1Node.getAddress().address(); - final InetSocketAddress c1N2Address = c1N2Node.getAddress().address(); - final InetSocketAddress c2N1Address = c2N1Node.getAddress().address(); - final InetSocketAddress c2N2Address = c2N2Node.getAddress().address(); - final CountDownLatch firstLatch = new CountDownLatch(1); service.updateRemoteCluster( "cluster_1", - Arrays.asList(c1N1Address, c1N2Address), + Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()), connectionListener(firstLatch)); firstLatch.await(); final CountDownLatch secondLatch = new CountDownLatch(1); service.updateRemoteCluster( "cluster_2", - Arrays.asList(c2N1Address, c2N2Address), + Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()), connectionListener(secondLatch)); secondLatch.await(); @@ -321,22 +326,17 @@ public class RemoteClusterServiceTests extends ESTestCase { service.initializeRemoteClusters(); assertFalse(service.isCrossClusterSearchEnabled()); - final InetSocketAddress c1N1Address = c1N1Node.getAddress().address(); - final InetSocketAddress c1N2Address = c1N2Node.getAddress().address(); - final InetSocketAddress c2N1Address = c2N1Node.getAddress().address(); - final InetSocketAddress c2N2Address = c2N2Node.getAddress().address(); - final CountDownLatch firstLatch = new CountDownLatch(1); service.updateRemoteCluster( "cluster_1", - Arrays.asList(c1N1Address, c1N2Address), + Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()), connectionListener(firstLatch)); firstLatch.await(); final CountDownLatch secondLatch = new CountDownLatch(1); service.updateRemoteCluster( "cluster_2", - Arrays.asList(c2N1Address, c2N2Address), + Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()), connectionListener(secondLatch)); secondLatch.await(); @@ -398,22 +398,17 @@ public class RemoteClusterServiceTests extends ESTestCase { service.initializeRemoteClusters(); assertFalse(service.isCrossClusterSearchEnabled()); - final InetSocketAddress c1N1Address = c1N1Node.getAddress().address(); - final InetSocketAddress c1N2Address = c1N2Node.getAddress().address(); - final InetSocketAddress c2N1Address = c2N1Node.getAddress().address(); - final InetSocketAddress c2N2Address = c2N2Node.getAddress().address(); - final CountDownLatch firstLatch = new CountDownLatch(1); service.updateRemoteCluster( "cluster_1", - Arrays.asList(c1N1Address, c1N2Address), + Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()), connectionListener(firstLatch)); firstLatch.await(); final CountDownLatch secondLatch = new 
CountDownLatch(1); service.updateRemoteCluster( "cluster_2", - Arrays.asList(c2N1Address, c2N2Address), + Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()), connectionListener(secondLatch)); secondLatch.await(); CountDownLatch latch = new CountDownLatch(1); diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 70f6061985a..aab84355581 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -722,3 +722,30 @@ setups['sensor_prefab_data'] = ''' {"node.terms.value":"c","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.0,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516294800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} ''' +setups['admin_role'] = ''' + - do: + xpack.security.put_role: + name: "my_admin_role" + body: > + { + "cluster": ["all"], + "indices": [ + {"names": ["index1", "index2" ], "privileges": ["all"], "field_security" : {"grant" : [ "title", "body" ]}} + ], + "run_as": [ "other_user" ], + "metadata" : {"version": 1} + } +''' +setups['jacknich_user'] = ''' + - do: + xpack.security.put_user: + username: "jacknich" + body: > + { + "password" : "test-password", + "roles" : [ "admin", "other_role1" ], + "full_name" : "Jack Nicholson", + "email" : "jacknich@example.com", + "metadata" : { "intelligence" : 7 } + } +''' diff --git a/x-pack/docs/en/rest-api/rollup-api.asciidoc b/x-pack/docs/en/rest-api/rollup-api.asciidoc index f1cd7c285a7..9a8ec00d77a 100644 --- a/x-pack/docs/en/rest-api/rollup-api.asciidoc +++ b/x-pack/docs/en/rest-api/rollup-api.asciidoc @@ -16,6 +16,7 @@ === Data * <> +* <> [float] [[rollup-search-endpoint]] @@ -31,5 +32,6 @@ include::rollup/put-job.asciidoc[] include::rollup/start-job.asciidoc[] include::rollup/stop-job.asciidoc[] include::rollup/rollup-caps.asciidoc[] +include::rollup/rollup-index-caps.asciidoc[] include::rollup/rollup-search.asciidoc[] include::rollup/rollup-job-config.asciidoc[] \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc b/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc index f770adf1f0d..1f233f195a0 100644 --- a/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc +++ b/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc @@ -27,8 +27,8 @@ live? ==== Path Parameters `index`:: - (string) Index, indices or index-pattern to return rollup capabilities for. If omitted (or `_all` is used) all available - rollup job capabilities will be returned + (string) Index, indices or index-pattern to return rollup capabilities for. `_all` may be used to fetch + rollup capabilities from all jobs ==== Request Body diff --git a/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc b/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc index 4636d9775e9..e5ca70cd59c 100644 --- a/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc +++ b/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc @@ -26,15 +26,13 @@ This API will allow you to determine: `index`:: (string) Index or index-pattern of concrete rollup indices to check for capabilities. - - ==== Request Body There is no request body for the Get Jobs API. 
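Since the body is empty, a request is just a GET against the concrete rollup index. A sketch, assuming a rollup index named `sensor_rollup` (the endpoint shape follows the other rollup APIs in this section):

[source,js]
--------------------------------------------------
GET /sensor_rollup/_xpack/rollup/data
--------------------------------------------------
// NOTCONSOLE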
==== Authorization -You must have `monitor`, `monitor_rollup`, `manage` or `manage_rollup` cluster privileges to use this API. +You must have the `read` index privilege on the index that stores the rollup results. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. diff --git a/x-pack/docs/en/rest-api/rollup/rollup-job-config.asciidoc b/x-pack/docs/en/rest-api/rollup/rollup-job-config.asciidoc index ef0ea6f00f7..2ba92b6b59e 100644 --- a/x-pack/docs/en/rest-api/rollup/rollup-job-config.asciidoc +++ b/x-pack/docs/en/rest-api/rollup/rollup-job-config.asciidoc @@ -82,6 +82,12 @@ In the above example, there are several pieces of logistical configuration for t will tend to execute faster, but will require more memory during processing. This has no effect on how the data is rolled up, it is merely used for tweaking the speed/memory cost of the indexer. +[NOTE] +The `index_pattern` cannot be a pattern that would also match the destination `rollup_index`. For example, the pattern +`"foo-*"` would match the rollup index `"foo-rollup"`. This causes problems because the rollup job would attempt +to roll up its own data at runtime. If you attempt to configure a pattern that matches the `rollup_index`, an exception +will be thrown to prevent this behavior. + [[rollup-groups-config]] ==== Grouping Config diff --git a/x-pack/docs/en/rest-api/rollup/rollup-search.asciidoc b/x-pack/docs/en/rest-api/rollup/rollup-search.asciidoc index 470cbc4eaf5..f595d52ec10 100644 --- a/x-pack/docs/en/rest-api/rollup/rollup-search.asciidoc +++ b/x-pack/docs/en/rest-api/rollup/rollup-search.asciidoc @@ -34,7 +34,7 @@ or using `_all`, is not permitted The request body supports a subset of features from the regular Search API. It supports: -- `query` param for specifying an DSL query, subject to some limitations +- `query` param for specifying a DSL query, subject to some limitations (see <> and <>) - `aggregations` param for specifying aggregations Functionality that is not available: diff --git a/x-pack/docs/en/rest-api/security.asciidoc b/x-pack/docs/en/rest-api/security.asciidoc index 227e343192a..f5b0c8eef66 100644 --- a/x-pack/docs/en/rest-api/security.asciidoc +++ b/x-pack/docs/en/rest-api/security.asciidoc @@ -2,21 +2,59 @@ [[security-api]] == Security APIs +You can use the following APIs to perform {security} activities.
+ * <> * <> * <> -* <> * <> * <> -* <> -* <> + +[float] +[[security-role-apis]] +=== Roles + +You can use the following APIs to add, remove, and retrieve roles in the native realm: + +* <>, <> +* <> +* <> + +[float] +[[security-token-apis]] +=== Tokens + +You can use the following APIs to create and invalidate bearer tokens for access +without requiring basic authentication: + +* <>, <> + +[float] +[[security-user-apis]] +=== Users + +You can use the following APIs to create, read, update, and delete users from the +native realm: + +* <>, <> +* <>, <> +* <> +* <> include::security/authenticate.asciidoc[] include::security/change-password.asciidoc[] include::security/clear-cache.asciidoc[] +include::security/clear-roles-cache.asciidoc[] +include::security/create-roles.asciidoc[] +include::security/create-users.asciidoc[] +include::security/delete-roles.asciidoc[] +include::security/delete-tokens.asciidoc[] +include::security/delete-users.asciidoc[] +include::security/disable-users.asciidoc[] +include::security/enable-users.asciidoc[] +include::security/get-roles.asciidoc[] +include::security/get-tokens.asciidoc[] +include::security/get-users.asciidoc[] include::security/privileges.asciidoc[] -include::security/roles.asciidoc[] include::security/role-mapping.asciidoc[] include::security/ssl.asciidoc[] -include::security/tokens.asciidoc[] -include::security/users.asciidoc[] diff --git a/x-pack/docs/en/rest-api/security/change-password.asciidoc b/x-pack/docs/en/rest-api/security/change-password.asciidoc index 7dee98480e7..6e6e8cf7375 100644 --- a/x-pack/docs/en/rest-api/security/change-password.asciidoc +++ b/x-pack/docs/en/rest-api/security/change-password.asciidoc @@ -1,9 +1,8 @@ [role="xpack"] [[security-api-change-password]] -=== Change Password API +=== Change passwords API -The Change Password API enables you to submit a request to change the password -of a user. +Changes the passwords of users in the native realm. ==== Request @@ -12,6 +11,15 @@ of a user. `POST _xpack/security/user//_password` +==== Description + +You can use the <> to update everything +but a user's `username` and `password`. This API changes a user's password. + +For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + + ==== Path Parameters `username`:: @@ -33,16 +41,17 @@ privilege can change passwords of other users. ==== Examples -The following example updates the password for the `elastic` user: +The following example updates the password for the `jacknich` user: [source,js] -------------------------------------------------- -POST _xpack/security/user/elastic/_password +POST /_xpack/security/user/jacknich/_password { - "password": "x-pack-test-password" + "password" : "s3cr3t" } -------------------------------------------------- // CONSOLE +// TEST[setup:jacknich_user] A successful call returns an empty JSON structure. diff --git a/x-pack/docs/en/rest-api/security/clear-roles-cache.asciidoc b/x-pack/docs/en/rest-api/security/clear-roles-cache.asciidoc new file mode 100644 index 00000000000..591d7eb2d11 --- /dev/null +++ b/x-pack/docs/en/rest-api/security/clear-roles-cache.asciidoc @@ -0,0 +1,39 @@ +[role="xpack"] +[[security-api-clear-role-cache]] +=== Clear roles cache API + +Evicts roles from the native role cache. + +==== Request + +`POST /_xpack/security/role//_clear_cache` + +==== Description + +For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + +==== Path Parameters + +`name`:: + (string) The name of the role. 
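If, as with the other role endpoints, the name segment accepts a comma-separated list (an assumption, not verified here), several role caches could be evicted in one call; `my_other_role` below is a hypothetical second role:

[source,js]
--------------------------------------------------
POST /_xpack/security/role/my_admin_role,my_other_role/_clear_cache
--------------------------------------------------
// NOTCONSOLE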
+ + +//==== Request Body + +==== Authorization + +To use this API, you must have at least the `manage_security` cluster +privilege. + + +==== Examples + +The clear roles cache API evicts roles from the native role cache. For example, +to clear the cache for `my_admin_role`: + +[source,js] +-------------------------------------------------- +POST /_xpack/security/role/my_admin_role/_clear_cache +-------------------------------------------------- +// CONSOLE diff --git a/x-pack/docs/en/rest-api/security/create-roles.asciidoc b/x-pack/docs/en/rest-api/security/create-roles.asciidoc new file mode 100644 index 00000000000..749676b4e83 --- /dev/null +++ b/x-pack/docs/en/rest-api/security/create-roles.asciidoc @@ -0,0 +1,102 @@ +[role="xpack"] +[[security-api-put-role]] +=== Create roles API + +Adds roles in the native realm. + +==== Request + +`POST /_xpack/security/role/` + + +`PUT /_xpack/security/role/` + + +==== Description + +The role API is generally the preferred way to manage roles, rather than using +file-based role management. For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + + +==== Path Parameters + +`name`:: + (string) The name of the role. + + +==== Request Body + +The following parameters can be specified in the body of a PUT or POST request +and pertain to adding a role: + +`cluster`:: (list) A list of cluster privileges. These privileges define the +cluster level actions that users with this role are able to execute. + +`indices`:: (list) A list of indices permissions entries. +`field_security`::: (list) The document fields that the owners of the role have +read access to. For more information, see +{stack-ov}/field-and-document-access-control.html[Setting up field and document level security]. +`names` (required)::: (list) A list of indices (or index name patterns) to which the +permissions in this entry apply. +`privileges`(required)::: (list) The index level privileges that the owners of the role +have on the specified indices. +`query`::: A search query that defines the documents the owners of the role have +read access to. A document within the specified indices must match this query in +order for it to be accessible by the owners of the role. + +`metadata`:: (object) Optional meta-data. Within the `metadata` object, keys +that begin with `_` are reserved for system usage. + +`run_as`:: (list) A list of users that the owners of this role can impersonate. +For more information, see +{stack-ov}/run-as-privilege.html[Submitting requests on behalf of other users]. + +For more information, see {stack-ov}/defining-roles.html[Defining roles]. + + +==== Authorization + +To use this API, you must have at least the `manage_security` cluster +privilege. + + +==== Examples + +The following example adds a role called `my_admin_role`: + +[source,js] +-------------------------------------------------- +POST /_xpack/security/role/my_admin_role +{ + "cluster": ["all"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": ["all"], + "field_security" : { // optional + "grant" : [ "title", "body" ] + }, + "query": "{\"match\": {\"title\": \"foo\"}}" // optional + } + ], + "run_as": [ "other_user" ], // optional + "metadata" : { // optional + "version" : 1 + } +} +-------------------------------------------------- +// CONSOLE + +A successful call returns a JSON structure that shows whether the role has been +created or updated. 
+ +[source,js] +-------------------------------------------------- +{ + "role": { + "created": true <1> + } +} +-------------------------------------------------- +// TESTRESPONSE +<1> When an existing role is updated, `created` is set to false. diff --git a/x-pack/docs/en/rest-api/security/create-users.asciidoc b/x-pack/docs/en/rest-api/security/create-users.asciidoc new file mode 100644 index 00000000000..5015d0401c2 --- /dev/null +++ b/x-pack/docs/en/rest-api/security/create-users.asciidoc @@ -0,0 +1,107 @@ +[role="xpack"] +[[security-api-put-user]] +=== Create users API + +Creates and updates users in the native realm. These users are commonly referred +to as _native users_. + + +==== Request + +`POST /_xpack/security/user/` + + +`PUT /_xpack/security/user/` + + +==== Description + +When updating a user, you can update everything but its `username` and `password`. +To change a user's password, use the +<>. + +For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + +==== Path Parameters + +`username` (required):: + (string) An identifier for the user. ++ +-- +[[username-validation]] +NOTE: Usernames must be at least 1 and no more than 1024 characters. They can +contain alphanumeric characters (`a-z`, `A-Z`, `0-9`), spaces, punctuation, and +printable symbols in the https://en.wikipedia.org/wiki/Basic_Latin_(Unicode_block)[Basic Latin (ASCII) block]. Leading or trailing whitespace is not allowed. + +-- + + +==== Request Body + +The following parameters can be specified in the body of a POST or PUT request: + +`enabled`:: +(boolean) Specifies whether the user is enabled. The default value is `true`. + +`email`:: +(string) The email of the user. + +`full_name`:: +(string) The full name of the user. + +`metadata`:: +(object) Arbitrary metadata that you want to associate with the user. + +`password` (required):: +(string) The user's password. Passwords must be at least 6 characters long. + +`roles` (required):: +(list) A set of roles the user has. The roles determine the user's access +permissions. To create a user without any roles, specify an empty list: `[]`. + + +==== Authorization + +To use this API, you must have at least the `manage_security` cluster privilege. + + +==== Examples + +The following example creates a user `jacknich`: + +[source,js] +-------------------------------------------------- +POST /_xpack/security/user/jacknich +{ + "password" : "j@rV1s", + "roles" : [ "admin", "other_role1" ], + "full_name" : "Jack Nicholson", + "email" : "jacknich@example.com", + "metadata" : { + "intelligence" : 7 + } +} +-------------------------------------------------- +// CONSOLE + +A successful call returns a JSON structure that shows whether the user has been +created or updated. + +[source,js] +-------------------------------------------------- +{ + "user": { + "created" : true <1> + } +} +-------------------------------------------------- +// TESTRESPONSE +<1> When an existing user is updated, `created` is set to false. + +After you add a user, requests from that user can be authenticated. 
For example: + +[source,shell] +-------------------------------------------------- +curl -u jacknich:j@rV1s http://localhost:9200/_cluster/health +-------------------------------------------------- +// NOTCONSOLE diff --git a/x-pack/docs/en/rest-api/security/delete-roles.asciidoc b/x-pack/docs/en/rest-api/security/delete-roles.asciidoc new file mode 100644 index 00000000000..db42493ca0f --- /dev/null +++ b/x-pack/docs/en/rest-api/security/delete-roles.asciidoc @@ -0,0 +1,53 @@ +[role="xpack"] +[[security-api-delete-role]] +=== Delete roles API + +Removes roles in the native realm. + +==== Request + +`DELETE /_xpack/security/role/` + + +==== Description + +The Roles API is generally the preferred way to manage roles, rather than using +file-based role management. For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + + +==== Path Parameters + +`name`:: + (string) The name of the role. + +//==== Request Body + +==== Authorization + +To use this API, you must have at least the `manage_security` cluster +privilege. + + +==== Examples + +The following example deletes a `my_admin_role` role: + +[source,js] +-------------------------------------------------- +DELETE /_xpack/security/role/my_admin_role +-------------------------------------------------- +// CONSOLE +// TEST[setup:admin_role] + +If the role is successfully deleted, the request returns `{"found": true}`. +Otherwise, `found` is set to false. + +[source,js] +-------------------------------------------------- +{ + "found" : true +} +-------------------------------------------------- +// TESTRESPONSE + diff --git a/x-pack/docs/en/rest-api/security/delete-tokens.asciidoc b/x-pack/docs/en/rest-api/security/delete-tokens.asciidoc new file mode 100644 index 00000000000..7d6bae2a4c4 --- /dev/null +++ b/x-pack/docs/en/rest-api/security/delete-tokens.asciidoc @@ -0,0 +1,54 @@ +[role="xpack"] +[[security-api-invalidate-token]] +=== Delete token API + +Invalidates a bearer token for access without requiring basic authentication. + +==== Request + +`DELETE /_xpack/security/oauth2/token` + +==== Description + +The tokens returned by the <> have a +finite period of time for which they are valid and after that time period, they +can no longer be used. That time period is defined by the +`xpack.security.authc.token.timeout` setting. For more information, see +<>. + +If you want to invalidate a token immediately, use this delete token API. + + +==== Request Body + +The following parameters can be specified in the body of a DELETE request and +pertain to deleting a token: + +`token` (required):: +(string) An access token. + +==== Examples + +The following example invalidates the specified token immediately: + +[source,js] +-------------------------------------------------- +DELETE /_xpack/security/oauth2/token +{ + "token" : "dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==" +} +-------------------------------------------------- +// NOTCONSOLE + +A successful call returns a JSON structure that indicates whether the token +has already been invalidated. + +[source,js] +-------------------------------------------------- +{ + "created" : true <1> +} +-------------------------------------------------- +// NOTCONSOLE + +<1> When a token has already been invalidated, `created` is set to false. 
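Afterwards, presenting the invalidated bearer token should be rejected by the cluster. A sketch of the failure mode, reusing the example token from above (the exact error body varies by version, but a 401 is expected):

[source,shell]
--------------------------------------------------
curl -H "Authorization: Bearer dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==" \
     http://localhost:9200/_cluster/health
# expected: HTTP 401, since the token is no longer valid
--------------------------------------------------
// NOTCONSOLE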
diff --git a/x-pack/docs/en/rest-api/security/delete-users.asciidoc b/x-pack/docs/en/rest-api/security/delete-users.asciidoc new file mode 100644 index 00000000000..63a66795617 --- /dev/null +++ b/x-pack/docs/en/rest-api/security/delete-users.asciidoc @@ -0,0 +1,48 @@ +[role="xpack"] +[[security-api-delete-user]] +=== Delete users API + +Deletes users from the native realm. + +==== Request + +`DELETE /_xpack/security/user/` + +==== Description + +For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + +==== Path Parameters + +`username` (required):: + (string) An identifier for the user. + +//==== Request Body + +==== Authorization + +To use this API, you must have at least the `manage_security` cluster privilege. + + +==== Examples + +The following example deletes the user `jacknich`: + +[source,js] +-------------------------------------------------- +DELETE /_xpack/security/user/jacknich +-------------------------------------------------- +// CONSOLE +// TEST[setup:jacknich_user] + +If the user is successfully deleted, the request returns `{"found": true}`. +Otherwise, `found` is set to false. + +[source,js] +-------------------------------------------------- +{ + "found" : true +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/x-pack/docs/en/rest-api/security/disable-users.asciidoc b/x-pack/docs/en/rest-api/security/disable-users.asciidoc new file mode 100644 index 00000000000..f5a6bc7e9a1 --- /dev/null +++ b/x-pack/docs/en/rest-api/security/disable-users.asciidoc @@ -0,0 +1,43 @@ +[role="xpack"] +[[security-api-disable-user]] +=== Disable users API + +Disables users in the native realm. + + +==== Request + +`PUT /_xpack/security/user//_disable` + + +==== Description + +By default, when you create users, they are enabled. You can use this API to +revoke a user's access to {es}. To re-enable a user, there is an +<>. + +For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + +==== Path Parameters + +`username` (required):: + (string) An identifier for the user. + +//==== Request Body + +==== Authorization + +To use this API, you must have at least the `manage_security` cluster privilege. + + +==== Examples + +The following example disables the user `jacknich`: + +[source,js] +-------------------------------------------------- +PUT /_xpack/security/user/jacknich/_disable +-------------------------------------------------- +// CONSOLE +// TEST[setup:jacknich_user] diff --git a/x-pack/docs/en/rest-api/security/enable-users.asciidoc b/x-pack/docs/en/rest-api/security/enable-users.asciidoc new file mode 100644 index 00000000000..cebaaffa7b2 --- /dev/null +++ b/x-pack/docs/en/rest-api/security/enable-users.asciidoc @@ -0,0 +1,42 @@ +[role="xpack"] +[[security-api-enable-user]] +=== Enable users API + +Enables users in the native realm. + + +==== Request + +`PUT /_xpack/security/user//_enable` + + +==== Description + +By default, when you create users, they are enabled. You can use this enable +users API and the <> to change that attribute. + +For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + +==== Path Parameters + +`username` (required):: + (string) An identifier for the user. + +//==== Request Body + +==== Authorization + +To use this API, you must have at least the `manage_security` cluster privilege. 
+ + +==== Examples + +The following example enables the user `jacknich`: + +[source,js] +-------------------------------------------------- +PUT /_xpack/security/user/jacknich/_enable +-------------------------------------------------- +// CONSOLE +// TEST[setup:jacknich_user] diff --git a/x-pack/docs/en/rest-api/security/get-roles.asciidoc b/x-pack/docs/en/rest-api/security/get-roles.asciidoc new file mode 100644 index 00000000000..fa6e91b519b --- /dev/null +++ b/x-pack/docs/en/rest-api/security/get-roles.asciidoc @@ -0,0 +1,85 @@ +[role="xpack"] +[[security-api-get-role]] +=== Get roles API + +Retrieves roles in the native realm. + +==== Request + +`GET /_xpack/security/role` + + +`GET /_xpack/security/role/` + + +==== Description + +For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + +==== Path Parameters + +`name`:: + (string) The name of the role. You can specify multiple roles as a + comma-separated list. If you do not specify this parameter, the API + returns information about all roles. + +//==== Request Body + +==== Authorization + +To use this API, you must have at least the `manage_security` cluster +privilege. + + +==== Examples + +The following example retrieves information about the `my_admin_role` role in +the native realm: + +[source,js] +-------------------------------------------------- +GET /_xpack/security/role/my_admin_role +-------------------------------------------------- +// CONSOLE +// TEST[setup:admin_role] + +A successful call returns an array of roles with the JSON representation of the +role. If the role is not defined in the native realm, the request returns 404. + +[source,js] +-------------------------------------------------- +{ + "my_admin_role": { + "cluster" : [ "all" ], + "indices" : [ + { + "names" : [ "index1", "index2" ], + "privileges" : [ "all" ], + "field_security" : { + "grant" : [ "title", "body" ]} + } + ], + "applications" : [ ], + "run_as" : [ "other_user" ], + "metadata" : { + "version" : 1 + }, + "transient_metadata": { + "enabled": true + } + } +} +-------------------------------------------------- +// TESTRESPONSE + +To retrieve all roles, omit the role name: + +[source,js] +-------------------------------------------------- +GET /_xpack/security/role +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +NOTE: If a single role is requested, that role is returned as the response. When +requesting multiple roles, an object is returned holding the found roles, each +keyed by the relevant role name. diff --git a/x-pack/docs/en/rest-api/security/tokens.asciidoc b/x-pack/docs/en/rest-api/security/get-tokens.asciidoc similarity index 62% rename from x-pack/docs/en/rest-api/security/tokens.asciidoc rename to x-pack/docs/en/rest-api/security/get-tokens.asciidoc index f991a5c0cb8..a2c4e6d7a37 100644 --- a/x-pack/docs/en/rest-api/security/tokens.asciidoc +++ b/x-pack/docs/en/rest-api/security/get-tokens.asciidoc @@ -1,15 +1,12 @@ [role="xpack"] -[[security-api-tokens]] -=== Token Management APIs +[[security-api-get-token]] +=== Get token API -The `token` API enables you to create and invalidate bearer tokens for access -without requiring basic authentication. +Creates a bearer token for access without requiring basic authentication. ==== Request -`POST /_xpack/security/oauth2/token` + - -`DELETE /_xpack/security/oauth2/token` +`POST /_xpack/security/oauth2/token` ==== Description @@ -19,20 +16,20 @@ you can explicitly enable the `xpack.security.authc.token.enabled` setting.
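For instance, in `elasticsearch.yml` (a minimal sketch; as the next paragraph notes, production mode additionally requires TLS on the HTTP interface):

[source,yaml]
--------------------------------------------------
xpack.security.authc.token.enabled: true
--------------------------------------------------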
When you are running in production mode, a bootstrap check prevents you from enabling the token service unless you also enable TLS on the HTTP interface. -The Get Token API takes the same parameters as a typical OAuth 2.0 token API +The get token API takes the same parameters as a typical OAuth 2.0 token API except for the use of a JSON request body. -A successful Get Token API call returns a JSON structure that contains the access +A successful get token API call returns a JSON structure that contains the access token, the amount of time (seconds) that the token expires in, the type, and the scope if available. -The tokens returned by the Get Token API have a finite period of time for which +The tokens returned by the get token API have a finite period of time for which they are valid and after that time period, they can no longer be used. That time period is defined by the `xpack.security.authc.token.timeout` setting. For more information, see <>. -If you want to invalidate a token immediately, you can do so by using the Delete -Token API. +If you want to invalidate a token immediately, you can do so by using the +<>. ==== Request Body @@ -41,28 +38,28 @@ The following parameters can be specified in the body of a POST request and pertain to creating a token: `grant_type`:: -(string) The type of grant. Currently only the `password` grant type is supported. +(string) The type of grant. Valid grant types are: `password` and `refresh_token`. -`password` (required):: -(string) The user's password. +`password`:: +(string) The user's password. If you specify the `password` grant type, this +parameter is required. + +`refresh_token`:: +(string) If you specify the `refresh_token` grant type, this parameter is +required. It contains the string that was returned when you created the token +and enables you to extend its life. `scope`:: (string) The scope of the token. Currently tokens are only issued for a scope of `FULL` regardless of the value sent with the request. -`username` (required):: -(string) The username that identifies the user. - -The following parameters can be specified in the body of a DELETE request and -pertain to deleting a token: - -`token`:: -(string) An access token. +`username`:: +(string) The username that identifies the user. If you specify the `password` +grant type, this parameter is required. ==== Examples -[[security-api-get-token]] -To obtain a token, submit a POST request to the `/_xpack/security/oauth2/token` -endpoint. + +The following example obtains a token for the `test_admin` user: [source,js] -------------------------------------------------- @@ -101,8 +98,8 @@ curl -H "Authorization: Bearer dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvb // NOTCONSOLE [[security-api-refresh-token]] -To extend the life of an existing token, the token api may be called again with the refresh -token within 24 hours of the token's creation. +To extend the life of an existing token, you can call the API again with the +refresh token within 24 hours of the token's creation. For example: [source,js] -------------------------------------------------- @@ -116,7 +113,8 @@ POST /_xpack/security/oauth2/token // TEST[s/vLBPvmAB6KvwvJZr27cS/$body.refresh_token/] // TEST[continued] -The API will return a new token and refresh token. Each refresh token may only be used one time. +The API will return a new token and refresh token. Each refresh token may only +be used one time. 
[source,js] -------------------------------------------------- @@ -128,32 +126,4 @@ The API will return a new token and refresh token. Each refresh token may only b } -------------------------------------------------- // TESTRESPONSE[s/dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==/$body.access_token/] -// TESTRESPONSE[s/vLBPvmAB6KvwvJZr27cS/$body.refresh_token/] - -[[security-api-invalidate-token]] -If a token must be invalidated immediately, you can do so by submitting a DELETE -request to `/_xpack/security/oauth2/token`. For example: - -[source,js] --------------------------------------------------- -DELETE /_xpack/security/oauth2/token -{ - "token" : "dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==" -} --------------------------------------------------- -// CONSOLE -// TEST[s/dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==/$body.access_token/] -// TEST[continued] - -A successful call returns a JSON structure that indicates whether the token -has already been invalidated. - -[source,js] --------------------------------------------------- -{ - "created" : true <1> -} --------------------------------------------------- -// TESTRESPONSE - -<1> When a token has already been invalidated, `created` is set to false. +// TESTRESPONSE[s/vLBPvmAB6KvwvJZr27cS/$body.refresh_token/] \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/security/get-users.asciidoc b/x-pack/docs/en/rest-api/security/get-users.asciidoc new file mode 100644 index 00000000000..2a20baacb0f --- /dev/null +++ b/x-pack/docs/en/rest-api/security/get-users.asciidoc @@ -0,0 +1,74 @@ +[role="xpack"] +[[security-api-get-user]] +=== Get users API + +Retrieves information about users in the native realm. + + +==== Request + +`GET /_xpack/security/user` + + +`GET /_xpack/security/user/<username>` + +==== Description + +For more information about the native realm, see +{stack-ov}/realms.html[Realms] and <>. + +==== Path Parameters + +`username`:: + (string) An identifier for the user. You can specify multiple usernames as a comma-separated list. If you omit this parameter, the API retrieves + information about all users. + +//==== Request Body + +==== Authorization + +To use this API, you must have at least the `manage_security` cluster privilege. + + +==== Examples + +To retrieve a native user, submit a GET request to the `/_xpack/security/user/<username>` +endpoint: + +[source,js] +-------------------------------------------------- +GET /_xpack/security/user/jacknich +-------------------------------------------------- +// CONSOLE +// TEST[setup:jacknich_user] + +A successful call returns an array of users with the JSON representation of the +user. Note that user passwords are not included. + +[source,js] +-------------------------------------------------- +{ + "jacknich": { + "username": "jacknich", + "roles": [ + "admin", "other_role1" + ], + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "metadata": { "intelligence" : 7 }, + "enabled": true + } +} +-------------------------------------------------- +// TESTRESPONSE + +If the user is not defined in the `native` realm, the request 404s.
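+ +Because the `username` path parameter accepts a comma-separated list, you can also retrieve several users in one call. A minimal sketch (these particular usernames are illustrative): + +[source,js] +-------------------------------------------------- +GET /_xpack/security/user/jacknich,rdinero +-------------------------------------------------- +// CONSOLE +// TEST[continued]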
+ +Omit the username to retrieve all users: + +[source,js] +-------------------------------------------------- +GET /_xpack/security/user +-------------------------------------------------- +// CONSOLE +// TEST[continued] diff --git a/x-pack/docs/en/rest-api/security/role-mapping.asciidoc b/x-pack/docs/en/rest-api/security/role-mapping.asciidoc index 3844e30c62d..c8006346d4e 100644 --- a/x-pack/docs/en/rest-api/security/role-mapping.asciidoc +++ b/x-pack/docs/en/rest-api/security/role-mapping.asciidoc @@ -22,7 +22,7 @@ Role mappings have _rules_ that identify users and a list of _roles_ that are granted to those users. NOTE: This API does not create roles. Rather, it maps users to existing roles. -Roles can be created by using <> or +Roles can be created by using <> or {xpack-ref}/defining-roles.html#roles-management-file[roles files]. The role mapping rule is a logical condition that is expressed using a JSON DSL. diff --git a/x-pack/docs/en/rest-api/security/roles.asciidoc b/x-pack/docs/en/rest-api/security/roles.asciidoc deleted file mode 100644 index 28c09c560ec..00000000000 --- a/x-pack/docs/en/rest-api/security/roles.asciidoc +++ /dev/null @@ -1,205 +0,0 @@ -[role="xpack"] -[[security-api-roles]] -=== Role Management APIs - -The Roles API enables you to add, remove, and retrieve roles in the `native` -realm. - -==== Request - -`GET /_xpack/security/role` + - -`GET /_xpack/security/role/<name>` + - -`DELETE /_xpack/security/role/<name>` + - -`POST /_xpack/security/role/<name>/_clear_cache` + - -`POST /_xpack/security/role/<name>` + - -`PUT /_xpack/security/role/<name>` - - -==== Description - -The Roles API is generally the preferred way to manage roles, rather than using -file-based role management. For more information, see -{xpack-ref}/authorization.html[Configuring Role-based Access Control]. - - -==== Path Parameters - -`name`:: - (string) The name of the role. If you do not specify this parameter, the - Get Roles API returns information about all roles. - - -==== Request Body - -The following parameters can be specified in the body of a PUT or POST request -and pertain to adding a role: - -`cluster`:: (list) A list of cluster privileges. These privileges define the -cluster level actions that users with this role are able to execute. - -`indices`:: (list) A list of indices permissions entries. -`field_security`::: (list) The document fields that the owners of the role have -read access to. For more information, see -{xpack-ref}/field-and-document-access-control.html[Setting Up Field and Document Level Security]. -`names` (required)::: (list) A list of indices (or index name patterns) to which the -permissions in this entry apply. -`privileges` (required)::: (list) The index level privileges that the owners of the role -have on the specified indices. -`query`::: A search query that defines the documents the owners of the role have -read access to. A document within the specified indices must match this query in -order for it to be accessible by the owners of the role. - -`metadata`:: (object) Optional meta-data. Within the `metadata` object, keys -that begin with `_` are reserved for system usage. - -`run_as`:: (list) A list of users that the owners of this role can impersonate. -For more information, see -{xpack-ref}/run-as-privilege.html[Submitting Requests on Behalf of Other Users]. - -For more information, see {xpack-ref}/defining-roles.html[Defining Roles]. - - -==== Authorization - -To use this API, you must have at least the `manage_security` cluster -privilege.
- - -==== Examples - -[[security-api-put-role]] -To add a role, submit a PUT or POST request to the `/_xpack/security/role/<name>` -endpoint: - -[source,js] --------------------------------------------------- -POST /_xpack/security/role/my_admin_role -{ - "cluster": ["all"], - "indices": [ - { - "names": [ "index1", "index2" ], - "privileges": ["all"], - "field_security" : { // optional - "grant" : [ "title", "body" ] - }, - "query": "{\"match\": {\"title\": \"foo\"}}" // optional - } - ], - "run_as": [ "other_user" ], // optional - "metadata" : { // optional - "version" : 1 - } -} --------------------------------------------------- -// CONSOLE - -A successful call returns a JSON structure that shows whether the role has been -created or updated. - -[source,js] --------------------------------------------------- -{ - "role": { - "created": true <1> - } -} --------------------------------------------------- -// TESTRESPONSE -<1> When an existing role is updated, `created` is set to false. - -[[security-api-get-role]] -To retrieve a role from the `native` Security realm, issue a GET request to the -`/_xpack/security/role/<name>` endpoint: - -[source,js] --------------------------------------------------- -GET /_xpack/security/role/my_admin_role --------------------------------------------------- -// CONSOLE -// TEST[continued] - -A successful call returns an array of roles with the JSON representation of the -role. If the role is not defined in the `native` realm, the request 404s. - -[source,js] --------------------------------------------------- -{ - "my_admin_role": { - "cluster" : [ "all" ], - "indices" : [ { - "names" : [ "index1", "index2" ], - "privileges" : [ "all" ], - "field_security" : { - "grant" : [ "title", "body" ] - }, - "query" : "{\"match\": {\"title\": \"foo\"}}" - } ], - "applications" : [ ], - "run_as" : [ "other_user" ], - "metadata" : { - "version" : 1 - }, - "transient_metadata": { - "enabled": true - } - } -} --------------------------------------------------- -// TESTRESPONSE - -You can specify multiple roles as a comma-separated list. To retrieve all roles, -omit the role name. - -[source,js] --------------------------------------------------- -# Retrieve roles "r1", "r2", and "my_admin_role" -GET /_xpack/security/role/r1,r2,my_admin_role - -# Retrieve all roles -GET /_xpack/security/role --------------------------------------------------- -// CONSOLE -// TEST[continued] - -NOTE: If a single role is requested, that role is returned as the response. When -requesting multiple roles, an object is returned holding the found roles, each -keyed by the relevant role name. - -[[security-api-delete-role]] -To delete a role, submit a DELETE request to the `/_xpack/security/role/<name>` -endpoint: - -[source,js] --------------------------------------------------- -DELETE /_xpack/security/role/my_admin_role --------------------------------------------------- -// CONSOLE -// TEST[continued] - -If the role is successfully deleted, the request returns `{"found": true}`. -Otherwise, `found` is set to false. - -[source,js] --------------------------------------------------- -{ - "found" : true -} --------------------------------------------------- -// TESTRESPONSE - -[[security-api-clear-role-cache]] -The Clear Roles Cache API evicts roles from the native role cache.
To clear the -cache for a role, submit a POST request to the `/_xpack/security/role/<name>/_clear_cache` -endpoint: - -[source,js] --------------------------------------------------- -POST /_xpack/security/role/my_admin_role/_clear_cache --------------------------------------------------- -// CONSOLE diff --git a/x-pack/docs/en/rest-api/security/users.asciidoc b/x-pack/docs/en/rest-api/security/users.asciidoc deleted file mode 100644 index c84da5c7d75..00000000000 --- a/x-pack/docs/en/rest-api/security/users.asciidoc +++ /dev/null @@ -1,226 +0,0 @@ -[role="xpack"] -[[security-api-users]] -=== User Management APIs - -The `user` API enables you to create, read, update, and delete users from the -`native` realm. These users are commonly referred to as *native users*. - - -==== Request - -`GET /_xpack/security/user` + - -`GET /_xpack/security/user/<username>` + - -`DELETE /_xpack/security/user/<username>` + - -`POST /_xpack/security/user/<username>` + - -`PUT /_xpack/security/user/<username>` + - -`PUT /_xpack/security/user/<username>/_disable` + - -`PUT /_xpack/security/user/<username>/_enable` + - -`PUT /_xpack/security/user/<username>/_password` - - -==== Description - -You can use the PUT user API to create or update users. When updating a user, -you can update everything but its `username` and `password`. To change a user's -password, use the <>. - -[[username-validation]] -NOTE: Usernames must be at least 1 and no more than 1024 characters. They can -contain alphanumeric characters (`a-z`, `A-Z`, `0-9`), spaces, punctuation, and -printable symbols in the https://en.wikipedia.org/wiki/Basic_Latin_(Unicode_block)[Basic Latin (ASCII) block]. -Leading or trailing whitespace is not allowed. - -==== Path Parameters - -`username`:: - (string) An identifier for the user. If you omit this parameter from a Get - User API request, it retrieves information about all users. - - -==== Request Body - -The following parameters can be specified in the body of a POST or PUT request -and pertain to creating a user: - -`enabled`:: -(boolean) Specifies whether the user is enabled. The default value is `true`. - -`email`:: -(string) The email of the user. - -`full_name`:: -(string) The full name of the user. - -`metadata`:: -(object) Arbitrary metadata that you want to associate with the user. - -`password` (required):: -(string) The user's password. Passwords must be at least 6 characters long. - -`roles` (required):: -(list) A set of roles the user has. The roles determine the user's access -permissions. To create a user without any roles, specify an empty list: `[]`. - -==== Authorization - -To use this API, you must have at least the `manage_security` cluster privilege. - - -==== Examples - -[[security-api-put-user]] -To add a user, submit a PUT or POST request to the `/_xpack/security/user/<username>` -endpoint. - -[source,js] --------------------------------------------------- -POST /_xpack/security/user/jacknich -{ - "password" : "j@rV1s", - "roles" : [ "admin", "other_role1" ], - "full_name" : "Jack Nicholson", - "email" : "jacknich@example.com", - "metadata" : { - "intelligence" : 7 - } -} --------------------------------------------------- -// CONSOLE - -A successful call returns a JSON structure that shows whether the user has been -created or updated. - -[source,js] --------------------------------------------------- -{ - "user": { - "created" : true <1> - } -} --------------------------------------------------- -// TESTRESPONSE -<1> When an existing user is updated, `created` is set to false. - -After you add a user through the Users API, requests from that user can be -authenticated.
For example: - -[source,shell] --------------------------------------------------- -curl -u jacknich:j@rV1s http://localhost:9200/_cluster/health --------------------------------------------------- -// NOTCONSOLE - -[[security-api-get-user]] -To retrieve a native user, submit a GET request to the `/_xpack/security/user/<username>` -endpoint: - -[source,js] --------------------------------------------------- -GET /_xpack/security/user/jacknich --------------------------------------------------- -// CONSOLE -// TEST[continued] - -A successful call returns an array of users with the JSON representation of the -user. Note that user passwords are not included. - -[source,js] --------------------------------------------------- -{ - "jacknich": { <1> - "username" : "jacknich", - "roles" : [ "admin", "other_role1" ], - "full_name" : "Jack Nicholson", - "email" : "jacknich@example.com", - "enabled": true, - "metadata" : { - "intelligence" : 7 - } - } -} --------------------------------------------------- -// TESTRESPONSE -<1> If the user is not defined in the `native` realm, the request 404s. - -You can specify multiple usernames as a comma-separated list: - -[source,js] --------------------------------------------------- -GET /_xpack/security/user/jacknich,rdinero --------------------------------------------------- -// CONSOLE -// TEST[continued] - -Omit the username to retrieve all users: - -[source,js] --------------------------------------------------- -GET /_xpack/security/user --------------------------------------------------- -// CONSOLE -// TEST[continued] - -[[security-api-reset-user-password]] -To reset the password for a user, submit a PUT request to the -`/_xpack/security/user/<username>/_password` endpoint: - -[source,js] --------------------------------------------------- -PUT /_xpack/security/user/jacknich/_password -{ - "password" : "s3cr3t" -} --------------------------------------------------- -// CONSOLE -// TEST[continued] - -[[security-api-disable-user]] -To disable a user, submit a PUT request to the -`/_xpack/security/user/<username>/_disable` endpoint: - -[source,js] --------------------------------------------------- -PUT /_xpack/security/user/jacknich/_disable --------------------------------------------------- -// CONSOLE -// TEST[continued] - -[[security-api-enable-user]] -To enable a user, submit a PUT request to the -`/_xpack/security/user/<username>/_enable` endpoint: - -[source,js] --------------------------------------------------- -PUT /_xpack/security/user/jacknich/_enable --------------------------------------------------- -// CONSOLE -// TEST[continued] - -[[security-api-delete-user]] -To delete a user, submit a DELETE request to the `/_xpack/security/user/<username>` -endpoint: - -[source,js] --------------------------------------------------- -DELETE /_xpack/security/user/jacknich --------------------------------------------------- -// CONSOLE -// TEST[continued] - -If the user is successfully deleted, the request returns `{"found": true}`. -Otherwise, `found` is set to false.
- -[source,js] --------------------------------------------------- -{ - "found" : true -} --------------------------------------------------- -// TESTRESPONSE diff --git a/x-pack/docs/en/rollup/api-quickref.asciidoc b/x-pack/docs/en/rollup/api-quickref.asciidoc index 937c6a84e5e..5e99f1c6984 100644 --- a/x-pack/docs/en/rollup/api-quickref.asciidoc +++ b/x-pack/docs/en/rollup/api-quickref.asciidoc @@ -15,18 +15,19 @@ Most {rollup} endpoints have the following base: [[rollup-api-jobs]] === /job/ -* {ref}/rollup-put-job.html[PUT /job/+++<job_id>+++]: Create a job -* {ref}/rollup-get-job.html[GET /job]: List jobs -* {ref}/rollup-get-job.html[GET /job/+++<job_id>+++]: Get job details -* {ref}/rollup-start-job.html[POST /job/<job_id>/_start]: Start a job -* {ref}/rollup-stop-job.html[POST /job/<job_id>/_stop]: Stop a job -* {ref}/rollup-delete-job.html[DELETE /job/+++<job_id>+++]: Delete a job +* {ref}/rollup-put-job.html[PUT /_xpack/rollup/job/+++<job_id>+++]: Create a job +* {ref}/rollup-get-job.html[GET /_xpack/rollup/job]: List jobs +* {ref}/rollup-get-job.html[GET /_xpack/rollup/job/+++<job_id>+++]: Get job details +* {ref}/rollup-start-job.html[POST /_xpack/rollup/job/<job_id>/_start]: Start a job +* {ref}/rollup-stop-job.html[POST /_xpack/rollup/job/<job_id>/_stop]: Stop a job +* {ref}/rollup-delete-job.html[DELETE /_xpack/rollup/job/+++<job_id>+++]: Delete a job [float] [[rollup-api-data]] === /data/ -* {ref}/rollup-get-rollup-caps.html[GET /data/<index_pattern>/_rollup_caps+++]: Get Rollup Capabilities +* {ref}/rollup-get-rollup-caps.html[GET /_xpack/rollup/data/<index_pattern>/_rollup_caps+++]: Get Rollup Capabilities +* {ref}/rollup-get-rollup-index-caps.html[GET /<index_pattern>/_rollup/data/+++]: Get Rollup Index Capabilities [float] [[rollup-api-index]] diff --git a/x-pack/docs/en/rollup/overview.asciidoc b/x-pack/docs/en/rollup/overview.asciidoc index a3f29f23bd1..a9a983fbecc 100644 --- a/x-pack/docs/en/rollup/overview.asciidoc +++ b/x-pack/docs/en/rollup/overview.asciidoc @@ -20,6 +20,7 @@ So while the cost of storing a millisecond of sensor data from ten years ago is reading often diminishes with time. It's not useless -- it could easily contribute to a useful analysis -- but its reduced value often leads to deletion rather than paying the fixed storage cost. +[float] === Rollup stores historical data at reduced granularity That's where Rollup comes into play. The Rollup functionality summarizes old, high-granularity data into a reduced @@ -35,6 +36,7 @@ automates this process of summarizing historical data. Details about setting up and configuring Rollup are covered in <> +[float] === Rollup uses standard query DSL The Rollup feature exposes a new search endpoint (`/_rollup_search` vs the standard `/_search`) which knows how to search @@ -48,6 +50,7 @@ are covered more in <>. But if your queries, aggregations and dashboards only use the available functionality, redirecting them to historical data is trivial. +[float] === Rollup merges "live" and "rolled" data A useful feature of Rollup is the ability to query both "live" realtime data and historical "rolled" data @@ -61,6 +64,7 @@ would only see data older than a month. The RollupSearch endpoint, however, sup It will take the results from both data sources and merge them together. If there is overlap between the "live" and "rolled" data, live data is preferred to increase accuracy. +[float] === Rollup is multi-interval aware Finally, Rollup is capable of intelligently utilizing the best interval available.
If you've worked with summarizing diff --git a/x-pack/docs/en/security/authorization/managing-roles.asciidoc b/x-pack/docs/en/security/authorization/managing-roles.asciidoc index b8a01aa4519..f550c900edc 100644 --- a/x-pack/docs/en/security/authorization/managing-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/managing-roles.asciidoc @@ -130,7 +130,7 @@ manage roles, log in to {kib} and go to *Management / Elasticsearch / Roles*. The _Role Management APIs_ enable you to add, update, remove and retrieve roles dynamically. When you use the APIs to manage roles in the `native` realm, the roles are stored in an internal {es} index. For more information and examples, -see {ref}/security-api-roles.html[Role Management APIs]. +see {ref}/security-api.html#security-role-apis[role management APIs]. [float] [[roles-management-file]] diff --git a/x-pack/docs/en/security/authorization/mapping-roles.asciidoc b/x-pack/docs/en/security/authorization/mapping-roles.asciidoc index cf8373a65f3..36f3a1f27f3 100644 --- a/x-pack/docs/en/security/authorization/mapping-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/mapping-roles.asciidoc @@ -18,7 +18,7 @@ the API, and other roles that are mapped through files. When you use role-mappings, you assign existing roles to users. The available roles should either be added using the -{ref}/security-api-roles.html[Role Management APIs] or defined in the +{ref}/security-api.html#security-role-apis[role management APIs] or defined in the <>. Either role-mapping method can use either role management method. For example, when you use the role mapping API, you are able to map users to both API-managed roles and file-managed roles diff --git a/x-pack/docs/en/security/configuring-es.asciidoc b/x-pack/docs/en/security/configuring-es.asciidoc index 5e8f1adbc7a..53f36afc734 100644 --- a/x-pack/docs/en/security/configuring-es.asciidoc +++ b/x-pack/docs/en/security/configuring-es.asciidoc @@ -9,7 +9,7 @@ password-protect your data as well as implement more advanced security measures such as encrypting communications, role-based access control, IP filtering, and auditing. For more information, see -{xpack-ref}/xpack-security.html[Securing the Elastic Stack]. +{xpack-ref}/elasticsearch-security.html[Securing the Elastic Stack]. 
To use {security} in {es}: diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java index 135ad755359..9068ffda4de 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java @@ -162,9 +162,8 @@ public class AnalysisConfig implements ToXContentObject, Writeable { } // BWC for removed per-partition normalization - // Version check is temporarily against the latest to satisfy CI tests - // TODO change to V_6_5_0 after successful backport to 6.x - if (in.getVersion().before(Version.V_7_0_0_alpha1)) { + // TODO Remove in 7.0.0 + if (in.getVersion().before(Version.V_6_5_0)) { in.readBoolean(); } } @@ -197,9 +196,8 @@ public class AnalysisConfig implements ToXContentObject, Writeable { } // BWC for removed per-partition normalization - // Version check is temporarily against the latest to satisfy CI tests - // TODO change to V_6_5_0 after successful backport to 6.x - if (out.getVersion().before(Version.V_7_0_0_alpha1)) { + // TODO Remove in 7.0.0 + if (out.getVersion().before(Version.V_6_5_0)) { out.writeBoolean(false); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java index 380f540a317..cdfd9bad7f1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java @@ -258,7 +258,7 @@ public class JobUpdate implements Writeable, ToXContentObject { } public boolean isAutodetectProcessUpdate() { - return modelPlotConfig != null || detectorUpdates != null; + return modelPlotConfig != null || detectorUpdates != null || groups != null; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java index b8c2685d28a..562e22a1eb9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivileg import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Objects; @@ -34,32 +33,6 @@ public final class PutPrivilegesRequestBuilder extends ActionRequestBuilder * *

- * The amount of work increases exponentially (2**log_rounds), so + * The amount of work increases exponentially (2**log_rounds), so * each increment is twice as much work. The default log_rounds is * 10, and the valid range is 4 to 30. * @@ -689,7 +690,11 @@ public class BCrypt { // the next lines are the SecureString replacement for the above commented-out section if (minor >= 'a') { - try (SecureString secureString = new SecureString(CharArrays.concat(password.getChars(), "\000".toCharArray()))) { + final char[] suffix = "\000".toCharArray(); + final char[] result = new char[password.length() + suffix.length]; + System.arraycopy(password.getChars(), 0, result, 0, password.length()); + System.arraycopy(suffix, 0, result, password.length(), suffix.length); + try (SecureString secureString = new SecureString(result)) { passwordb = CharArrays.toUtf8Bytes(secureString.getChars()); } } else { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CharArrays.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CharArrays.java deleted file mode 100644 index 26df90c31a2..00000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CharArrays.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.security.authc.support; - -import java.nio.ByteBuffer; -import java.nio.CharBuffer; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; - -/** - * Helper class similar to Arrays to handle conversions for Char arrays - */ -public class CharArrays { - - public static char[] utf8BytesToChars(byte[] utf8Bytes) { - ByteBuffer byteBuffer = ByteBuffer.wrap(utf8Bytes); - CharBuffer charBuffer = StandardCharsets.UTF_8.decode(byteBuffer); - char[] chars = Arrays.copyOfRange(charBuffer.array(), charBuffer.position(), charBuffer.limit()); - byteBuffer.clear(); - charBuffer.clear(); - return chars; - } - - /** - * Like String.indexOf for for an array of chars - */ - static int indexOf(char[] array, char ch) { - for (int i = 0; (i < array.length); i++) { - if (array[i] == ch) { - return i; - } - } - return -1; - } - - /** - * Converts the provided char[] to a UTF-8 byte[]. The provided char[] is not modified by this - * method, so the caller needs to take care of clearing the value if it is sensitive. 
- */ - public static byte[] toUtf8Bytes(char[] chars) { - CharBuffer charBuffer = CharBuffer.wrap(chars); - ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(charBuffer); - byte[] bytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit()); - Arrays.fill(byteBuffer.array(), (byte) 0); // clear sensitive data - return bytes; - } - - public static boolean charsBeginsWith(String prefix, char[] chars) { - if (chars == null || prefix == null) { - return false; - } - - if (prefix.length() > chars.length) { - return false; - } - - for (int i = 0; i < prefix.length(); i++) { - if (chars[i] != prefix.charAt(i)) { - return false; - } - } - - return true; - } - - public static boolean constantTimeEquals(char[] a, char[] b) { - if (a.length != b.length) { - return false; - } - - int equals = 0; - for (int i = 0; i < a.length; i++) { - equals |= a[i] ^ b[i]; - } - - return equals == 0; - } - - public static boolean constantTimeEquals(String a, String b) { - if (a.length() != b.length()) { - return false; - } - - int equals = 0; - for (int i = 0; i < a.length; i++) { - equals |= a.charAt(i) ^ b.charAt(i); - } - - return equals == 0; - } - - public static char[] concat(char[] a, char[] b) { - final char[] result = new char[a.length + b.length]; - System.arraycopy(a, 0, result, 0, a.length); - System.arraycopy(b, 0, result, a.length, b.length); - return result; - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java index d12547bd906..492622b2c51 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.security.authc.support; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.CharArrays; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.settings.SecureString; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java index d8e58c29d23..13493036008 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.security.authc.support; +import org.elasticsearch.common.CharArrays; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -107,7 +108,7 @@ public class UsernamePasswordToken implements AuthenticationToken { throw authenticationError("invalid basic authentication header encoding", e); } - int i = CharArrays.indexOf(userpasswd, ':'); + int i = indexOfColon(userpasswd); if (i < 0) { throw authenticationError("invalid basic authentication header value"); } @@ -121,4 +122,15 @@ public class UsernamePasswordToken implements AuthenticationToken { context.putHeader(BASIC_AUTH_HEADER, basicAuthHeaderValue(token.username, token.password)); } + /** + * Like String.indexOf, but for an array of chars + */ + private static int
indexOfColon(char[] array) { + for (int i = 0; (i < array.length); i++) { + if (array[i] == ':') { + return i; + } + } + return -1; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java index e1d3a2db8e9..d3cc60194f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java @@ -292,12 +292,6 @@ public class SecurityClient { return new GetPrivilegesRequestBuilder(client, GetPrivilegesAction.INSTANCE).application(applicationName).privileges(privileges); } - public PutPrivilegesRequestBuilder preparePutPrivilege(String applicationName, String privilegeName, - BytesReference bytesReference, XContentType xContentType) throws IOException { - return new PutPrivilegesRequestBuilder(client, PutPrivilegesAction.INSTANCE) - .source(applicationName, privilegeName, bytesReference, xContentType); - } - public PutPrivilegesRequestBuilder preparePutPrivileges(BytesReference bytesReference, XContentType xContentType) throws IOException { return new PutPrivilegesRequestBuilder(client, PutPrivilegesAction.INSTANCE).source(bytesReference, xContentType); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java index d959c017e0a..a3814a76a3e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.core.ssl; import org.elasticsearch.common.hash.MessageDigests; -import org.elasticsearch.xpack.core.security.authc.support.CharArrays; +import org.elasticsearch.common.CharArrays; import java.io.BufferedReader; import java.io.IOException; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/crypto/CryptoService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/crypto/CryptoService.java index b1f3a32769e..a25e79ffdf6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/crypto/CryptoService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/crypto/CryptoService.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.watcher.WatcherField; import org.elasticsearch.xpack.core.security.SecurityField; -import org.elasticsearch.xpack.core.security.authc.support.CharArrays; +import org.elasticsearch.common.CharArrays; import javax.crypto.BadPaddingException; import javax.crypto.Cipher; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index ffc13655d22..3030449abd1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -100,7 +100,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase { assertTrue(update.isAutodetectProcessUpdate()); update = new 
JobUpdate.Builder("foo").setDetectorUpdates(Collections.singletonList(mock(JobUpdate.DetectorUpdate.class))).build(); assertTrue(update.isAutodetectProcessUpdate()); + update = new JobUpdate.Builder("foo").setGroups(Arrays.asList("bar")).build(); + assertTrue(update.isAutodetectProcessUpdate()); } public void testUpdateAnalysisLimitWithValueGreaterThanMax() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java index 38857e2170d..dca2f37f3f2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java @@ -80,9 +80,8 @@ public class DocumentSubsetReaderTests extends ESTestCase { bitsetFilterCache.close(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32457") public void testSearch() throws Exception { - IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig()); + IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random()))); Document document = new Document(); document.add(new StringField("field", "value1", Field.Store.NO)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParams.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParams.java index 127fb18e5ff..2d338890f9f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParams.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParams.java @@ -66,6 +66,7 @@ public final class UpdateParams { return new Builder(jobUpdate.getJobId()) .modelPlotConfig(jobUpdate.getModelPlotConfig()) .detectorUpdates(jobUpdate.getDetectorUpdates()) + .updateScheduledEvents(jobUpdate.getGroups() != null) .build(); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParamsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParamsTests.java new file mode 100644 index 00000000000..2683c1131f5 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParamsTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.job.process.autodetect; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ml.job.config.DetectionRule; +import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; +import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; +import org.elasticsearch.xpack.core.ml.job.config.Operator; +import org.elasticsearch.xpack.core.ml.job.config.RuleCondition; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + + +public class UpdateParamsTests extends ESTestCase { + + public void testFromJobUpdate() { + String jobId = "foo"; + DetectionRule rule = new DetectionRule.Builder(Arrays.asList( + new RuleCondition(RuleCondition.AppliesTo.ACTUAL, + Operator.GT, 1.0))).build(); + List<DetectionRule> rules = Arrays.asList(rule); + List<JobUpdate.DetectorUpdate> detectorUpdates = Collections.singletonList( + new JobUpdate.DetectorUpdate(2, null, rules)); + JobUpdate.Builder updateBuilder = new JobUpdate.Builder(jobId) + .setModelPlotConfig(new ModelPlotConfig()) + .setDetectorUpdates(detectorUpdates); + + UpdateParams params = UpdateParams.fromJobUpdate(updateBuilder.build()); + + assertFalse(params.isUpdateScheduledEvents()); + assertEquals(params.getDetectorUpdates(), updateBuilder.build().getDetectorUpdates()); + assertEquals(params.getModelPlotConfig(), updateBuilder.build().getModelPlotConfig()); + + params = UpdateParams.fromJobUpdate(updateBuilder.setGroups(Arrays.asList("bar")).build()); + + assertTrue(params.isUpdateScheduledEvents()); + } + +} diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json index c0a13ea63a6..e1f418d5a8d 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json @@ -145,7 +145,7 @@ }, "transform": { "script": { - "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def state = ctx.payload.check.hits.hits[0]._source.cluster_state.status;if (ctx.vars.not_resolved){ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check == false) {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = ['timestamp': ctx.execution_time, 'metadata': ctx.metadata.xpack];}if (ctx.vars.fails_check) {ctx.payload.prefix = 'Elasticsearch cluster status is ' + state + '.';if (state == 'red') {ctx.payload.message = 'Allocate missing primary shards and replica shards.';ctx.payload.metadata.severity = 2100;} else {ctx.payload.message = 'Allocate missing replica shards.';ctx.payload.metadata.severity = 1100;}}ctx.vars.state = state.toUpperCase();ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" + "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0 && ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack != null) ?
ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def state = ctx.payload.check.hits.hits[0]._source.cluster_state.status;if (ctx.vars.not_resolved){ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check == false) {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = ['timestamp': ctx.execution_time, 'metadata': ctx.metadata.xpack];}if (ctx.vars.fails_check) {ctx.payload.prefix = 'Elasticsearch cluster status is ' + state + '.';if (state == 'red') {ctx.payload.message = 'Allocate missing primary shards and replica shards.';ctx.payload.metadata.severity = 2100;} else {ctx.payload.message = 'Allocate missing replica shards.';ctx.payload.metadata.severity = 1100;}}ctx.vars.state = state.toUpperCase();ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" } }, "actions": { diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_nodes.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_nodes.json index a6bf7b6145c..5c0cb7f55b4 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_nodes.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_nodes.json @@ -151,7 +151,7 @@ }, "transform": { "script": { - "source": "void formatResults(StringBuilder message, String type, Map typeMap) {if (typeMap.empty == false) {message.append(' Node');if (typeMap.size() != 1) {message.append('s were');} else {message.append(' was');}message.append(' ').append(type).append(' [').append(typeMap.size()).append(']: ').append(typeMap.values().stream().collect(Collectors.joining(', ', '[', ']'))).append('.');}}ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? 
ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;def clusterState = ctx.payload.check.hits.hits[0]._source.cluster_state;def persistentUuidToName = [:];def latestNodes = clusterState.nodes;def ephemeralUuidToPersistentUuid = [:];def payload = ['timestamp': ctx.execution_time,'updated_timestamp': ctx.execution_time,'resolved_timestamp': ctx.execution_time,'metadata': ctx.metadata.xpack,'prefix': 'Elasticsearch cluster nodes have changed!','nodes': ['hash': clusterState.nodes_hash,'added': persistentUuidToName,'removed': [:],'restarted': [:]]];for (def latestNode : latestNodes.entrySet()) {persistentUuidToName[latestNode.key] = latestNode.value.name;ephemeralUuidToPersistentUuid[latestNode.value.ephemeral_id] = latestNode.key;}def previousNodes = ctx.payload.check.hits.hits[1]._source.cluster_state.nodes;def previousPersistentUuidToName = [:];for (def previousNode : previousNodes.entrySet()){if (persistentUuidToName.containsKey(previousNode.key) == false){payload.nodes.removed[previousNode.key] = previousNode.value.name;}else{if (ephemeralUuidToPersistentUuid.containsKey(previousNode.value.ephemeral_id) == false) {payload.nodes.restarted[previousNode.key] = persistentUuidToName[previousNode.key];}persistentUuidToName.remove(previousNode.key);}}StringBuilder message = new StringBuilder();formatResults(message, 'removed', payload.nodes.removed);formatResults(message, 'added', payload.nodes.added);formatResults(message, 'restarted', payload.nodes.restarted);payload.message = message.toString().trim();return payload;" + "source": "void formatResults(StringBuilder message, String type, Map typeMap) {if (typeMap.empty == false) {message.append(' Node');if (typeMap.size() != 1) {message.append('s were');} else {message.append(' was');}message.append(' ').append(type).append(' [').append(typeMap.size()).append(']: ').append(typeMap.values().stream().collect(Collectors.joining(', ', '[', ']'))).append('.');}}ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0 && ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack != null) ? 
ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;def clusterState = ctx.payload.check.hits.hits[0]._source.cluster_state;def persistentUuidToName = [:];def latestNodes = clusterState.nodes;def ephemeralUuidToPersistentUuid = [:];def payload = ['timestamp': ctx.execution_time,'updated_timestamp': ctx.execution_time,'resolved_timestamp': ctx.execution_time,'metadata': ctx.metadata.xpack,'prefix': 'Elasticsearch cluster nodes have changed!','nodes': ['hash': clusterState.nodes_hash,'added': persistentUuidToName,'removed': [:],'restarted': [:]]];for (def latestNode : latestNodes.entrySet()) {persistentUuidToName[latestNode.key] = latestNode.value.name;ephemeralUuidToPersistentUuid[latestNode.value.ephemeral_id] = latestNode.key;}def previousNodes = ctx.payload.check.hits.hits[1]._source.cluster_state.nodes;def previousPersistentUuidToName = [:];for (def previousNode : previousNodes.entrySet()){if (persistentUuidToName.containsKey(previousNode.key) == false){payload.nodes.removed[previousNode.key] = previousNode.value.name;}else{if (ephemeralUuidToPersistentUuid.containsKey(previousNode.value.ephemeral_id) == false) {payload.nodes.restarted[previousNode.key] = persistentUuidToName[previousNode.key];}persistentUuidToName.remove(previousNode.key);}}StringBuilder message = new StringBuilder();formatResults(message, 'removed', payload.nodes.removed);formatResults(message, 'added', payload.nodes.added);formatResults(message, 'restarted', payload.nodes.restarted);payload.message = message.toString().trim();return payload;" } }, "actions": { diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json index 7e18c981f0f..051a3a9d409 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json @@ -141,7 +141,7 @@ }, "transform": { "script": { - "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def versionMessage = null;if (ctx.vars.fails_check) {def versions = new ArrayList(ctx.payload.check.hits.hits[0]._source.cluster_stats.nodes.versions);Collections.sort(versions);versionMessage = 'Versions: [' + String.join(', ', versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Elasticsearch.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" + "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0 && ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack != null) ? 
ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def versionMessage = null;if (ctx.vars.fails_check) {def versions = new ArrayList(ctx.payload.check.hits.hits[0]._source.cluster_stats.nodes.versions);Collections.sort(versions);versionMessage = 'Versions: [' + String.join(', ', versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Elasticsearch.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" } }, "actions": { diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/kibana_version_mismatch.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/kibana_version_mismatch.json index bf2da3ffb1d..b2acba610e1 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/kibana_version_mismatch.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/kibana_version_mismatch.json @@ -161,7 +161,7 @@ }, "transform": { "script": { - "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def versionMessage = null;if (ctx.vars.fails_check) {versionMessage = 'Versions: [' + String.join(', ', ctx.vars.versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Kibana.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" + "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0 && ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack != null) ? 
ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def versionMessage = null;if (ctx.vars.fails_check) {versionMessage = 'Versions: [' + String.join(', ', ctx.vars.versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Kibana.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" } }, "actions": { diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/logstash_version_mismatch.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/logstash_version_mismatch.json index 71a0cfd46bf..cf1fdde606c 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/logstash_version_mismatch.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/logstash_version_mismatch.json @@ -161,7 +161,7 @@ }, "transform": { "script": { - "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def versionMessage = null;if (ctx.vars.fails_check) {versionMessage = 'Versions: [' + String.join(', ', ctx.vars.versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Logstash.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" + "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0 && ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack != null) ? 
ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def versionMessage = null;if (ctx.vars.fails_check) {versionMessage = 'Versions: [' + String.join(', ', ctx.vars.versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Logstash.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" } }, "actions": { diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/xpack_license_expiration.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/xpack_license_expiration.json index a05198a15eb..7eb0d59167d 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/xpack_license_expiration.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/xpack_license_expiration.json @@ -134,7 +134,7 @@ }, "transform": { "script": { - "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def alertMessage = null;if (ctx.vars.fails_check) { alertMessage = 'Update your license.';} if (ctx.vars.not_resolved) { ctx.payload = ctx.payload.alert.hits.hits[0]._source;ctx.payload.metadata = ctx.metadata.xpack;if (ctx.vars.fails_check == false) { ctx.payload.resolved_timestamp = ctx.execution_time;} } else { ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster\\'s license is going to expire in {{#relativeTime}}metadata.time{{/relativeTime}} at {{#absoluteTime}}metadata.time{{/absoluteTime}}.', 'message': alertMessage, 'metadata': ctx.metadata.xpack ];} if (ctx.vars.fails_check) { ctx.payload.metadata.time = ctx.vars.expiry.toString();} ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" + "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0 && ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack != null) ? 
ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def alertMessage = null;if (ctx.vars.fails_check) { alertMessage = 'Update your license.';} if (ctx.vars.not_resolved) { ctx.payload = ctx.payload.alert.hits.hits[0]._source;ctx.payload.metadata = ctx.metadata.xpack;if (ctx.vars.fails_check == false) { ctx.payload.resolved_timestamp = ctx.execution_time;} } else { ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster\\'s license is going to expire in {{#relativeTime}}metadata.time{{/relativeTime}} at {{#absoluteTime}}metadata.time{{/absoluteTime}}.', 'message': alertMessage, 'metadata': ctx.metadata.xpack ];} if (ctx.vars.fails_check) { ctx.payload.metadata.time = ctx.vars.expiry.toString();} ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" } }, "actions": { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index d31ffae13f2..02910b5dd74 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -191,7 +191,6 @@ import org.elasticsearch.xpack.security.rest.action.oauth2.RestGetTokenAction; import org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestDeletePrivilegesAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestGetPrivilegesAction; -import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegeAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestClearRolesCacheAction; @@ -302,7 +301,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw new TLSLicenseBootstrapCheck(), new FIPS140SecureSettingsBootstrapCheck(settings, env), new FIPS140JKSKeystoreBootstrapCheck(settings), - new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings))); + new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings), + new FIPS140LicenseBootstrapCheck(XPackSettings.FIPS_MODE_ENABLED.get(settings)))); checks.addAll(InternalRealms.getBootstrapChecks(settings, env)); this.bootstrapChecks = Collections.unmodifiableList(checks); Automatons.updateMaxDeterminizedStates(settings); @@ -762,7 +762,6 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw new RestSamlInvalidateSessionAction(settings, restController, getLicenseState()), new RestGetPrivilegesAction(settings, restController, getLicenseState()), new RestPutPrivilegesAction(settings, restController, getLicenseState()), - new RestPutPrivilegeAction(settings, restController, getLicenseState()), new RestDeletePrivilegesAction(settings, restController, getLicenseState()) ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java index d175e1b2293..8107d748818 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java @@ -32,7 +32,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.ActiveDirectorySessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.PoolingSessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope; -import org.elasticsearch.xpack.core.security.authc.support.CharArrays; +import org.elasticsearch.common.CharArrays; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.authc.ldap.support.LdapMetaDataResolver; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java index 36d14aa67c0..70b2f0015cf 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java @@ -19,7 +19,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapSessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.SearchGroupsResolverSettings; -import org.elasticsearch.xpack.core.security.authc.support.CharArrays; +import org.elasticsearch.common.CharArrays; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.authc.ldap.support.LdapMetaDataResolver; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java index 2ec87888d8c..a3541ec2759 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapUserSearchSessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.SearchGroupsResolverSettings; import org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope; -import org.elasticsearch.xpack.core.security.authc.support.CharArrays; +import org.elasticsearch.common.CharArrays; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession.GroupsResolver; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java index 367bd525036..986fa1900e7 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java @@ -25,7 +25,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.PoolingSessionFactorySettings; -import org.elasticsearch.xpack.core.security.authc.support.CharArrays; +import org.elasticsearch.common.CharArrays; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.authc.ldap.support.LdapMetaDataResolver; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 2247cbe02a8..c388fd5627c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -30,7 +30,6 @@ import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest; import org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField; -import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -429,7 +428,7 @@ class IndicesAndAliasesResolver { } @Override - protected void updateRemoteCluster(String clusterAlias, List<InetSocketAddress> addresses) { + protected void updateRemoteCluster(String clusterAlias, List<String> addresses) { if (addresses.isEmpty()) { clusters.remove(clusterAlias); } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegeAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegeAction.java deleted file mode 100644 index 6c3ef8e70fa..00000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegeAction.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License.
- */ -package org.elasticsearch.xpack.security.rest.action.privilege; - -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequestBuilder; -import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; -import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; - -import java.io.IOException; - -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestRequest.Method.PUT; - -/** - * Rest endpoint to add one or more {@link ApplicationPrivilege} objects to the security index - */ -public class RestPutPrivilegeAction extends SecurityBaseRestHandler { - - public RestPutPrivilegeAction(Settings settings, RestController controller, XPackLicenseState licenseState) { - super(settings, licenseState); - controller.registerHandler(PUT, "/_xpack/security/privilege/{application}/{privilege}", this); - controller.registerHandler(POST, "/_xpack/security/privilege/{application}/{privilege}", this); - } - - @Override - public String getName() { - return "xpack_security_put_privilege_action"; - } - - @Override - public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - final String application = request.param("application"); - final String privilege = request.param("privilege"); - PutPrivilegesRequestBuilder requestBuilder = new SecurityClient(client) - .preparePutPrivilege(application, privilege, request.requiredContent(), request.getXContentType()) - .setRefreshPolicy(request.param("refresh")); - - return RestPutPrivilegesAction.execute(requestBuilder); - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java index eb1104c9bc0..dc565e3f873 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java @@ -29,6 +29,7 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.RestRequest.Method.PUT; /** * Rest endpoint to add one or more {@link ApplicationPrivilege} objects to the security index @@ -37,6 +38,7 @@ public class RestPutPrivilegesAction extends SecurityBaseRestHandler { public RestPutPrivilegesAction(Settings settings, RestController controller, XPackLicenseState licenseState) { super(settings, licenseState); + controller.registerHandler(PUT, "/_xpack/security/privilege/", this); controller.registerHandler(POST, "/_xpack/security/privilege/", this); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilderTests.java index db0548c03ef..2ece398d3d1 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilderTests.java @@ -52,36 +52,6 @@ public class PutPrivilegesRequestBuilderTests extends ESTestCase { return new ApplicationPrivilegeDescriptor(app, name, Sets.newHashSet(actions), Collections.emptyMap()); } - public void testBuildRequestFromJsonObject() throws Exception { - final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE); - builder.source("foo", "read", new BytesArray( - "{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] }" - ), XContentType.JSON); - final List<ApplicationPrivilegeDescriptor> privileges = builder.request().getPrivileges(); - assertThat(privileges, iterableWithSize(1)); - assertThat(privileges, contains(descriptor("foo", "read", "data:/read/*", "admin:/read/*"))); - } - - public void testPrivilegeNameValidationOfSingleElement() throws Exception { - final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE); - final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> - builder.source("foo", "write", new BytesArray( - "{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] }" - ), XContentType.JSON)); - assertThat(exception.getMessage(), containsString("write")); - assertThat(exception.getMessage(), containsString("read")); - } - - public void testApplicationNameValidationOfSingleElement() throws Exception { - final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE); - final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> - builder.source("bar", "read", new BytesArray( - "{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] }" - ), XContentType.JSON)); - assertThat(exception.getMessage(), containsString("foo")); - assertThat(exception.getMessage(), containsString("bar")); - } - public void testPrivilegeNameValidationOfMultipleElement() throws Exception { final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE); final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java index 212ee7ea499..6d75cf09371 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; -import org.elasticsearch.xpack.core.security.authc.support.CharArrays; +import org.elasticsearch.common.CharArrays; import org.elasticsearch.xpack.core.security.client.SecurityClient; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.BeforeClass; diff --git
a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_roles.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_roles.json index c94333325b1..d945ebe3247 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_roles.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_roles.json @@ -1,6 +1,6 @@ { "xpack.security.clear_cached_roles": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-clear-role-cache", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-role-cache.html", "methods": [ "POST" ], "url": { "path": "/_xpack/security/role/{name}/_clear_cache", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json index 4351b1bc847..881105d60b8 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json @@ -1,6 +1,6 @@ { "xpack.security.delete_role": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-delete-role", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-role.html", "methods": [ "DELETE" ], "url": { "path": "/_xpack/security/role/{name}", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json index d72c854a69d..fa1deb3e1ec 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json @@ -1,6 +1,6 @@ { "xpack.security.delete_user": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-delete-user", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-user.html", "methods": [ "DELETE" ], "url": { "path": "/_xpack/security/user/{username}", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json index 3a72b314191..0e55e82ead6 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json @@ -1,6 +1,6 @@ { "xpack.security.disable_user": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-disable-user", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-disable-user.html", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/user/{username}/_disable", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json index c68144957f0..da2f67adbea 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json +++ 
b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json @@ -1,6 +1,6 @@ { "xpack.security.enable_user": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-enable-user", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/user/{username}/_enable", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role.json index 3479c911ccd..67bdbb8a911 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role.json @@ -1,6 +1,6 @@ { "xpack.security.get_role": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-get-role", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html", "methods": [ "GET" ], "url": { "path": "/_xpack/security/role/{name}", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_token.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_token.json index 8020d1ecd6d..0b6f141d10e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_token.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_token.json @@ -1,6 +1,6 @@ { "xpack.security.get_token": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-tokens.html#security-api-get-token", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-token.html", "methods": [ "POST" ], "url": { "path": "/_xpack/security/oauth2/token", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_user.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_user.json index 910fb7d0645..94dcbca81e1 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_user.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_user.json @@ -1,6 +1,6 @@ { "xpack.security.get_user": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-get-user", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html", "methods": [ "GET" ], "url": { "path": "/_xpack/security/user/{username}", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.invalidate_token.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.invalidate_token.json index be032c2ffd0..27dd1030914 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.invalidate_token.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.invalidate_token.json @@ -1,6 +1,6 @@ { "xpack.security.invalidate_token": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-tokens.html#security-api-invalidate-token", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-invalidate-token.html", "methods": [ "DELETE" ], "url": { "path": "/_xpack/security/oauth2/token", diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privilege.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privilege.json deleted file mode 100644 index 3d453682c64..00000000000 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privilege.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack.security.put_privilege": { - "documentation": "TODO", - "methods": [ "POST", "PUT" ], - "url": { - "path": "/_xpack/security/privilege/{application}/{name}", - "paths": [ "/_xpack/security/privilege/{application}/{name}" ], - "parts": { - "application": { - "type" : "string", - "description" : "Application name", - "required" : true - }, - "name": { - "type" : "string", - "description" : "Privilege name", - "required" : true - } - }, - "params": { - "refresh": { - "type" : "enum", - "options": ["true", "false", "wait_for"], - "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." - } - } - }, - "body": { - "description" : "The privilege to add", - "required" : true - } - } -} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privileges.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privileges.json index 07eb5417158..312db3c9a18 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privileges.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privileges.json @@ -1,7 +1,7 @@ { "xpack.security.put_privileges": { "documentation": "TODO", - "methods": [ "POST" ], + "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/privilege/", "paths": [ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role.json index 4152975189e..63ef5ee3786 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role.json @@ -1,6 +1,6 @@ { "xpack.security.put_role": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-put-role", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role.html", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/role/{name}", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_user.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_user.json index de07498a409..1b51783a05e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_user.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_user.json @@ -1,6 +1,6 @@ { "xpack.security.put_user": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-put-user", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/user/{username}", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/10_basic.yml index e8dddf21535..30fa3a8d078 
100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/10_basic.yml @@ -30,24 +30,26 @@ teardown: ignore: 404 --- "Test put and get privileges": - # Single privilege, with names in URL + # Single privilege - do: - xpack.security.put_privilege: - application: app - name: p1 + xpack.security.put_privileges: body: > { - "application": "app", - "name": "p1", - "actions": [ "data:read/*" , "action:login" ], - "metadata": { - "key1" : "val1a", - "key2" : "val2a" + "app": { + "p1": { + "application": "app", + "name": "p1", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key1" : "val1a", + "key2" : "val2a" + } + } } } - match: { "app.p1" : { created: true } } - # Multiple privileges, no names in URL + # Multiple privileges - do: xpack.security.put_privileges: body: > @@ -84,18 +86,18 @@ teardown: - match: { "app.p3" : { created: true } } - match: { "app2.p1" : { created: true } } - # Update existing privilege, with names in URL + # Update existing privilege - do: - xpack.security.put_privilege: - application: app - name: p1 + xpack.security.put_privileges: body: > { - "application": "app", - "name": "p1", - "actions": [ "data:read/*" , "action:login" ], - "metadata": { - "key3" : "val3" + "app": { + "p1": { + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key3" : "val3" + } + } } } - match: { "app.p1" : { created: false } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/40_condtional_cluster_priv.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/40_condtional_cluster_priv.yml index b3a1e220690..a7d3fabd2a2 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/40_condtional_cluster_priv.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/40_condtional_cluster_priv.yml @@ -31,21 +31,25 @@ setup: } - do: - xpack.security.put_privilege: - application: app-allow - name: read + xpack.security.put_privileges: body: > { - "actions": [ "data:read/*" ] + "app-allow": { + "read": { + "actions": [ "data:read/*" ] + } + } } - do: - xpack.security.put_privilege: - application: app_deny - name: read + xpack.security.put_privileges: body: > { - "actions": [ "data:read/*" ] + "app_deny": { + "read": { + "actions": [ "data:read/*" ] + } + } } --- @@ -82,12 +86,14 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user - xpack.security.put_privilege: - application: app - name: read + xpack.security.put_privileges: body: > { - "actions": [ "data:read/*" ] + "app": { + "read": { + "actions": [ "data:read/*" ] + } + } } - match: { "app.read" : { created: true } } @@ -112,12 +118,14 @@ teardown: "Test put application privileges when not allowed": - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user - xpack.security.put_privilege: - application: app_deny - name: write + xpack.security.put_privileges: body: > { - "actions": [ "data:write/*" ] + "app_deny": { + "write": { + "actions": [ "data:write/*" ] + } + } } catch: forbidden diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/DeleteJobRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/DeleteJobRequest.java new file mode 100644 index 00000000000..1b7450de092 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/DeleteJobRequest.java @@ -0,0
+1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; + +import java.util.Objects; + +public class DeleteJobRequest extends ActionRequest { + + private String jobId; + private boolean force; + + public DeleteJobRequest(String jobId) { + this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); + } + + public String getJobId() { + return jobId; + } + + public void setJobId(String jobId) { + this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); + } + + public boolean isForce() { + return force; + } + + public void setForce(boolean force) { + this.force = force; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, force); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || obj.getClass() != getClass()) { + return false; + } + + DeleteJobRequest other = (DeleteJobRequest) obj; + return Objects.equals(jobId, other.jobId) && Objects.equals(force, other.force); + } + +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/DeleteJobResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/DeleteJobResponse.java new file mode 100644 index 00000000000..0b4faa38f54 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/DeleteJobResponse.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteJobResponse extends AcknowledgedResponse { + + public DeleteJobResponse(boolean acknowledged) { + super(acknowledged); + } + + public DeleteJobResponse() { + } + + public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException { + AcknowledgedResponse response = AcknowledgedResponse.fromXContent(parser); + return new DeleteJobResponse(response.isAcknowledged()); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + DeleteJobResponse that = (DeleteJobResponse) other; + return isAcknowledged() == that.isAcknowledged(); + } + + @Override + public int hashCode() { + return Objects.hash(isAcknowledged()); + } + +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java index 00fa1bdd47f..7baaae52a8b 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java @@ -300,6 +300,10 @@ public class AnalysisConfig implements ToXContentObject { multivariateByFields); } + public static Builder builder(List<Detector> detectors) { + return new Builder(detectors); + } + public static class Builder { private List<Detector> detectors; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java index 3274b03877f..042d48b7006 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java @@ -265,6 +265,10 @@ public class Detector implements ToXContentObject { excludeFrequent, rules, detectorIndex); } + public static Builder builder() { + return new Builder(); + } + public static class Builder { private String detectorDescription; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java index 6bc1be3b563..59840cfec2a 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java @@ -412,6 +412,10 @@ public class Job implements ToXContentObject { return Strings.toString(this); } + public static Builder builder(String id) { + return new Builder(id); + } + public static class Builder { private String id; @@ -435,7 +439,7 @@ public class Job implements ToXContentObject { private String resultsIndexName; private boolean deleted; - public Builder() { + private Builder() { } public Builder(String id) { diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/DeleteJobRequestTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/DeleteJobRequestTests.java new file mode 100644 index 00000000000..fb8a38fa0c6 --- /dev/null +++
b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/DeleteJobRequestTests.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.protocol.xpack.ml.job.config.JobTests; +import org.elasticsearch.test.ESTestCase; + +public class DeleteJobRequestTests extends ESTestCase { + + private DeleteJobRequest createTestInstance() { + return new DeleteJobRequest(JobTests.randomValidJobId()); + } + + public void test_WithNullJobId() { + NullPointerException ex = expectThrows(NullPointerException.class, () -> new DeleteJobRequest(null)); + assertEquals("[job_id] must not be null", ex.getMessage()); + + ex = expectThrows(NullPointerException.class, () -> createTestInstance().setJobId(null)); + assertEquals("[job_id] must not be null", ex.getMessage()); + } + + public void test_WithForce() { + DeleteJobRequest deleteJobRequest = createTestInstance(); + assertFalse(deleteJobRequest.isForce()); + + deleteJobRequest.setForce(true); + assertTrue(deleteJobRequest.isForce()); + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/DeleteJobResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/DeleteJobResponseTests.java new file mode 100644 index 00000000000..a73179a0898 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/DeleteJobResponseTests.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class DeleteJobResponseTests extends AbstractXContentTestCase<DeleteJobResponse> { + + @Override + protected DeleteJobResponse createTestInstance() { + return new DeleteJobResponse(); + } + + @Override + protected DeleteJobResponse doParseInstance(XContentParser parser) throws IOException { + return DeleteJobResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 24303b8342b..6ead87aba61 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -325,6 +325,7 @@ public class FullClusterRestartIT extends ESRestTestCase { } } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/32773") public void testRollupIDSchemeAfterRestart() throws Exception { assumeTrue("Rollup can be tested with 6.3.0 and onwards", oldClusterVersion.onOrAfter(Version.V_6_3_0)); assumeTrue("Rollup ID scheme changed in 6.4", oldClusterVersion.before(Version.V_6_4_0)); diff --git a/x-pack/qa/full-cluster-restart/with-system-key/build.gradle b/x-pack/qa/full-cluster-restart/with-system-key/build.gradle index 928280b6584..e69de29bb2d 100644 --- a/x-pack/qa/full-cluster-restart/with-system-key/build.gradle +++ b/x-pack/qa/full-cluster-restart/with-system-key/build.gradle @@ -1,8 +0,0 @@ -import org.elasticsearch.gradle.test.RestIntegTestTask - -// Skip test on FIPS FIXME https://github.com/elastic/elasticsearch/issues/32737 -if (project.inFipsJvm) { - tasks.withType(RestIntegTestTask) { - enabled = false - } -} diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java index 80afdeff82a..cc5a9f4f1b4 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; +import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Detector; @@ -36,7 +37,11 @@ public class BasicRenormalizationIT extends MlNativeAutodetectIntegTestCase { String jobId = "basic-renormalization-it-test-default-renormalization-job"; createAndRunJob(jobId, null); - List<AnomalyRecord> records = getRecords(jobId); + GetRecordsAction.Request getRecordsRequest = new GetRecordsAction.Request(jobId); + // Set the record score filter to 10.0 to avoid low-score records caused by the multi-bucket trailing effect + getRecordsRequest.setRecordScore(10.0); + + List<AnomalyRecord> records = 
getRecords(getRecordsRequest); assertThat(records.size(), equalTo(2)); AnomalyRecord laterRecord = records.get(0); assertThat(laterRecord.getActual().get(0), equalTo(100.0)); diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java index 6703e4ef236..fb261908e2c 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java @@ -12,11 +12,13 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; +import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.junit.After; @@ -193,9 +195,9 @@ public class ScheduledEventsIT extends MlNativeAutodetectIntegTestCase { /** * Test an open job picks up changes to scheduled events/calendars */ - public void testOnlineUpdate() throws Exception { + public void testAddEventsToOpenJob() throws Exception { TimeValue bucketSpan = TimeValue.timeValueMinutes(30); - Job.Builder job = createJob("scheduled-events-online-update", bucketSpan); + Job.Builder job = createJob("scheduled-events-add-events-to-open-job", bucketSpan); long startTime = 1514764800000L; final int bucketCount = 5; @@ -209,7 +211,7 @@ // Now create a calendar and events for the job while it is open String calendarId = "test-calendar-online-update"; - putCalendar(calendarId, Collections.singletonList(job.getId()), "testOnlineUpdate calendar"); + putCalendar(calendarId, Collections.singletonList(job.getId()), "testAddEventsToOpenJob calendar"); List<ScheduledEvent> events = new ArrayList<>(); long eventStartTime = startTime + (bucketCount + 1) * bucketSpan.millis(); @@ -257,6 +259,81 @@ assertEquals(0, buckets.get(8).getScheduledEvents().size()); } + /** + * An open job that later gets added to a calendar should take the scheduled events into account + */ + public void testAddOpenedJobToGroupWithCalendar() throws Exception { + TimeValue bucketSpan = TimeValue.timeValueMinutes(30); + String groupName = "opened-calendar-job-group"; + Job.Builder job = createJob("scheduled-events-add-opened-job-to-group-with-calendar", bucketSpan); + + long startTime = 1514764800000L; + final int bucketCount = 5; + + // Open the job + openJob(job.getId()); + + // write some buckets of data + postData(job.getId(), generateData(startTime, bucketSpan, bucketCount, bucketIndex -> randomIntBetween(100, 200)) + .stream().collect(Collectors.joining())); + + String calendarId = "test-calendar-open-job-update"; + + // Create a new calendar referencing groupName + 
putCalendar(calendarId, Collections.singletonList(groupName), "testAddOpenedJobToGroupWithCalendar calendar"); + + // Put events in the calendar + List<ScheduledEvent> events = new ArrayList<>(); + long eventStartTime = startTime + (bucketCount + 1) * bucketSpan.millis(); + long eventEndTime = eventStartTime + (long)(1.5 * bucketSpan.millis()); + events.add(new ScheduledEvent.Builder().description("Some Event") + .startTime(ZonedDateTime.ofInstant(Instant.ofEpochMilli(eventStartTime), ZoneOffset.UTC)) + .endTime(ZonedDateTime.ofInstant(Instant.ofEpochMilli(eventEndTime), ZoneOffset.UTC)) + .calendarId(calendarId).build()); + + postScheduledEvents(calendarId, events); + + // Update the job to be a member of the group + UpdateJobAction.Request jobUpdateRequest = new UpdateJobAction.Request(job.getId(), + new JobUpdate.Builder(job.getId()).setGroups(Collections.singletonList(groupName)).build()); + client().execute(UpdateJobAction.INSTANCE, jobUpdateRequest).actionGet(); + + // Wait until the notification that the job was updated is indexed + assertBusy(() -> { + SearchResponse searchResponse = client().prepareSearch(".ml-notifications") + .setSize(1) + .addSort("timestamp", SortOrder.DESC) + .setQuery(QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("job_id", job.getId())) + .filter(QueryBuilders.termQuery("level", "info")) + ).get(); + SearchHit[] hits = searchResponse.getHits().getHits(); + assertThat(hits.length, equalTo(1)); + assertThat(hits[0].getSourceAsMap().get("message"), equalTo("Job updated: [groups]")); + }); + + // write some more buckets of data that cover the scheduled event period + postData(job.getId(), generateData(startTime + bucketCount * bucketSpan.millis(), bucketSpan, 5, + bucketIndex -> randomIntBetween(100, 200)) + .stream().collect(Collectors.joining())); + // and close + closeJob(job.getId()); + + GetBucketsAction.Request getBucketsRequest = new GetBucketsAction.Request(job.getId()); + List<Bucket> buckets = getBuckets(getBucketsRequest); + + // the first 6 buckets have no events + for (int i=0; i<=bucketCount; i++) { + assertEquals(0, buckets.get(i).getScheduledEvents().size()); + } + // 7th and 8th buckets have the event but the last one does not + assertEquals(1, buckets.get(6).getScheduledEvents().size()); + assertEquals("Some Event", buckets.get(6).getScheduledEvents().get(0)); + assertEquals(1, buckets.get(7).getScheduledEvents().size()); + assertEquals("Some Event", buckets.get(7).getScheduledEvents().get(0)); + assertEquals(0, buckets.get(8).getScheduledEvents().size()); + } + private Job.Builder createJob(String jobId, TimeValue bucketSpan) { Detector.Builder detector = new Detector.Builder("count", null); AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build())); diff --git a/x-pack/qa/ml-single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java b/x-pack/qa/ml-single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java index 79e9a81831f..0751d7307ae 100644 --- a/x-pack/qa/ml-single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java +++ b/x-pack/qa/ml-single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java @@ -240,6 +240,7 @@ public class PainlessDomainSplitIT extends ESRestTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32966") public void testHRDSplit() throws Exception { // Create job diff --git
a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_ml_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_ml_jobs_crud.yml index cb036b9d13a..ba0f4d5091e 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_ml_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_ml_jobs_crud.yml @@ -1,9 +1,3 @@ ---- -setup: - - skip: - version: "all" - reason: "Temporarily disabled while backporting https://github.com/elastic/elasticsearch/pull/32816" - --- "Test get old cluster job": - do: diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/30_ml_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/30_ml_jobs_crud.yml index 061a242a78d..3a3334f6907 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/30_ml_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/30_ml_jobs_crud.yml @@ -1,9 +1,3 @@ ---- -setup: - - skip: - version: "all" - reason: "Temporarily disabled while backporting https://github.com/elastic/elasticsearch/pull/32816" - --- "Put job on the old cluster and post some data": diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml index 1da16e79cbe..bb47524b41d 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml @@ -1,8 +1,4 @@ setup: - - skip: - version: "all" - reason: "Temporarily disabled while backporting https://github.com/elastic/elasticsearch/pull/32816" - - do: cluster.health: wait_for_status: green diff --git a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle b/x-pack/qa/rolling-upgrade/with-system-key/build.gradle index 5aaa1ed1eff..03505e01ded 100644 --- a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle +++ b/x-pack/qa/rolling-upgrade/with-system-key/build.gradle @@ -1,10 +1 @@ -import org.elasticsearch.gradle.test.RestIntegTestTask - -// Skip test on FIPS FIXME https://github.com/elastic/elasticsearch/issues/32737 -if (project.inFipsJvm) { - tasks.withType(RestIntegTestTask) { - enabled = false - } -} - group = "${group}.x-pack.qa.rolling-upgrade.with-system-key" diff --git a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java index af3fb160e13..c6502c05d25 100644 --- a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java +++ b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.core.security.authc.support.CharArrays; +import org.elasticsearch.common.CharArrays; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import 
org.elasticsearch.protocol.xpack.security.User;
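
Reviewer note: the new x-pack protocol classes above ship without a usage example, so here is a minimal sketch of how the DeleteJobRequest added in this change might be driven. It is illustrative only and not part of the diff; the wrapper class, the job id, and the println are assumptions, and the x-pack protocol jar is assumed to be on the classpath.

    import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;

    public class DeleteJobRequestExample {
        public static void main(String[] args) {
            // "example-job" is a placeholder; the constructor rejects a null job id.
            DeleteJobRequest request = new DeleteJobRequest("example-job");
            // Request a forced delete, removing the job even if it has not been closed first (defaults to false).
            request.setForce(true);
            System.out.println("delete job " + request.getJobId() + " (force=" + request.isForce() + ")");
        }
    }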