Merge branch 'master' into index-lifecycle

Tal Levy 2018-08-17 13:57:28 -07:00
commit a26e108590
90 changed files with 1771 additions and 835 deletions

View File

@ -87,8 +87,15 @@ subprojects {
}
}
}
repositories {
maven {
name = 'localTest'
url = "${rootProject.buildDir}/local-test-repo"
}
}
}
}
plugins.withType(BuildPlugin).whenPluginAdded {
project.licenseFile = project.rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
project.noticeFile = project.rootProject.file('NOTICE.txt')
@ -228,6 +235,7 @@ subprojects {
"org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}": ':client:rest-high-level',
"org.elasticsearch.client:test:${version}": ':client:test',
"org.elasticsearch.client:transport:${version}": ':client:transport',
"org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${version}": ':modules:lang-painless:spi',
"org.elasticsearch.test:framework:${version}": ':test:framework',
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:archives:integ-test-zip',
"org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:archives:zip',

View File

@ -162,11 +162,24 @@ if (project != rootProject) {
// it's fine as we run them as part of :buildSrc
test.enabled = false
task integTest(type: Test) {
// integration test requires the local testing repo for example plugin builds
dependsOn project.rootProject.allprojects.collect {
it.tasks.matching { it.name == 'publishNebulaPublicationToLocalTestRepository'}
}
exclude "**/*Tests.class"
include "**/*IT.class"
testClassesDirs = sourceSets.test.output.classesDirs
classpath = sourceSets.test.runtimeClasspath
inputs.dir(file("src/testKit"))
// tell BuildExamplePluginsIT where to find the example plugins
systemProperty (
'test.build-tools.plugin.examples',
files(
project(':example-plugins').subprojects.collect { it.projectDir }
).asPath,
)
systemProperty 'test.local-test-repo-path', "${rootProject.buildDir}/local-test-repo"
systemProperty 'test.lucene-snapshot-revision', (versions.lucene =~ /\w+-snapshot-([a-z0-9]+)/)[0][1]
}
check.dependsOn(integTest)

View File

@ -554,7 +554,7 @@ class BuildPlugin implements Plugin<Project> {
project.publishing {
publications {
nebula(MavenPublication) {
artifact project.tasks.shadowJar
artifacts = [ project.tasks.shadowJar ]
artifactId = project.archivesBaseName
/*
* Configure the pom to include the "shadow" as compile dependencies
@ -584,7 +584,6 @@ class BuildPlugin implements Plugin<Project> {
}
}
}
}
/** Adds compiler settings to the project */
@ -799,6 +798,8 @@ class BuildPlugin implements Plugin<Project> {
systemProperty 'tests.task', path
systemProperty 'tests.security.manager', 'true'
systemProperty 'jna.nosys', 'true'
// TODO: remove this deprecation compatibility setting for 7.0
systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false'
systemProperty 'compiler.java', project.ext.compilerJavaVersion.getMajorVersion()
if (project.ext.inFipsJvm) {
systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() + "FIPS"

View File

@ -25,7 +25,6 @@ import org.elasticsearch.gradle.NoticeTask
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.test.RunTask
import org.gradle.api.InvalidUserDataException
import org.gradle.api.JavaVersion
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.XmlProvider
@ -39,7 +38,6 @@ import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.util.regex.Matcher
import java.util.regex.Pattern
/**
* Encapsulates build configuration for an Elasticsearch plugin.
*/

View File

@ -20,6 +20,7 @@ package org.elasticsearch.gradle.plugin
import org.gradle.api.Project
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputFile
/**
* A container for plugin properties that will be written to the plugin descriptor, for easy
@ -55,18 +56,39 @@ class PluginPropertiesExtension {
boolean requiresKeystore = false
/** A license file that should be included in the built plugin zip. */
@Input
File licenseFile = null
private File licenseFile = null
/**
* A notice file that should be included in the built plugin zip. This will be
* extended with notices from the {@code licenses/} directory.
*/
@Input
File noticeFile = null
private File noticeFile = null
Project project = null
PluginPropertiesExtension(Project project) {
name = project.name
version = project.version
this.project = project
}
@InputFile
File getLicenseFile() {
return licenseFile
}
void setLicenseFile(File licenseFile) {
project.ext.licenseFile = licenseFile
this.licenseFile = licenseFile
}
@InputFile
File getNoticeFile() {
return noticeFile
}
void setNoticeFile(File noticeFile) {
project.ext.noticeFile = noticeFile
this.noticeFile = noticeFile
}
}

View File

@ -23,7 +23,6 @@ import org.gradle.api.InvalidUserDataException
import org.gradle.api.Task
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.OutputFile
/**
* Creates a plugin descriptor.
*/

View File

@ -177,6 +177,12 @@ class NodeInfo {
javaVersion = 8
} else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) {
javaVersion = 9
} else if (project.inFipsJvm && nodeVersion.onOrAfter("6.3.0") && nodeVersion.before("6.4.0")) {
/*
* Elasticsearch versions before 6.4.0 cannot be run in a FIPS-140 JVM. If we're running
* bwc tests in a FIPS-140 JVM, ensure that the pre v6.4.0 nodes use a Java 10 JVM instead.
*/
javaVersion = 10
}
args.addAll("-E", "node.portsfile=true")

View File

@ -31,6 +31,7 @@ import org.gradle.api.provider.Provider
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskState
import org.gradle.plugins.ide.idea.IdeaPlugin
import java.nio.charset.StandardCharsets
import java.nio.file.Files
@ -243,10 +244,12 @@ public class RestIntegTestTask extends DefaultTask {
}
}
}
project.idea { if (project.plugins.hasPlugin(IdeaPlugin)) {
module { project.idea {
if (scopes.TEST != null) { module {
scopes.TEST.plus.add(project.configurations.restSpec) if (scopes.TEST != null) {
scopes.TEST.plus.add(project.configurations.restSpec)
}
}
}
}

View File

@ -0,0 +1,164 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.apache.commons.io.FileUtils;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.GradleRunner;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class BuildExamplePluginsIT extends GradleIntegrationTestCase {
private static List<File> EXAMPLE_PLUGINS = Collections.unmodifiableList(
Arrays.stream(
Objects.requireNonNull(System.getProperty("test.build-tools.plugin.examples"))
.split(File.pathSeparator)
).map(File::new).collect(Collectors.toList())
);
@Rule
public TemporaryFolder tmpDir = new TemporaryFolder();
public final File examplePlugin;
public BuildExamplePluginsIT(File examplePlugin) {
this.examplePlugin = examplePlugin;
}
@BeforeClass
public static void assertProjectsExist() {
assertEquals(
EXAMPLE_PLUGINS,
EXAMPLE_PLUGINS.stream().filter(File::exists).collect(Collectors.toList())
);
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
return EXAMPLE_PLUGINS
.stream()
.map(each -> new Object[] {each})
.collect(Collectors.toList());
}
public void testCurrentExamplePlugin() throws IOException {
FileUtils.copyDirectory(examplePlugin, tmpDir.getRoot());
// just get rid of deprecation warnings
Files.write(
getTempPath("settings.gradle"),
"enableFeaturePreview('STABLE_PUBLISHING')\n".getBytes(StandardCharsets.UTF_8)
);
adaptBuildScriptForTest();
Files.write(
tmpDir.newFile("NOTICE.txt").toPath(),
"dummy test notice".getBytes(StandardCharsets.UTF_8)
);
GradleRunner.create()
.withProjectDir(tmpDir.getRoot())
.withArguments("clean", "check", "-s", "-i", "--warning-mode=all", "--scan")
.withPluginClasspath()
.build();
}
private void adaptBuildScriptForTest() throws IOException {
// Add the local repo as a build script URL so we can pull in build-tools and apply the plugin under test
// + is ok because we have no other repo and just want to pick up latest
writeBuildScript(
"buildscript {\n" +
" repositories {\n" +
" maven {\n" +
" url = '" + getLocalTestRepoPath() + "'\n" +
" }\n" +
" }\n" +
" dependencies {\n" +
" classpath \"org.elasticsearch.gradle:build-tools:+\"\n" +
" }\n" +
"}\n"
);
// get the original file
Files.readAllLines(getTempPath("build.gradle"), StandardCharsets.UTF_8)
.stream()
.map(line -> line + "\n")
.forEach(this::writeBuildScript);
// Add a repositories section to be able to resolve dependencies
String luceneSnapshotRepo = "";
String luceneSnapshotRevision = System.getProperty("test.lucene-snapshot-revision");
if (luceneSnapshotRepo != null) {
luceneSnapshotRepo = " maven {\n" +
" url \"http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision + "\"\n" +
" }\n";
}
writeBuildScript("\n" +
"repositories {\n" +
" maven {\n" +
" url \"" + getLocalTestRepoPath() + "\"\n" +
" }\n" +
luceneSnapshotRepo +
"}\n"
);
Files.delete(getTempPath("build.gradle"));
Files.move(getTempPath("build.gradle.new"), getTempPath("build.gradle"));
System.err.print("Generated build script is:");
Files.readAllLines(getTempPath("build.gradle")).forEach(System.err::println);
}
private Path getTempPath(String fileName) {
return new File(tmpDir.getRoot(), fileName).toPath();
}
private Path writeBuildScript(String script) {
try {
Path path = getTempPath("build.gradle.new");
return Files.write(
path,
script.getBytes(StandardCharsets.UTF_8),
Files.exists(path) ? StandardOpenOption.APPEND : StandardOpenOption.CREATE_NEW
);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private String getLocalTestRepoPath() {
String property = System.getProperty("test.local-test-repo-path");
Objects.requireNonNull(property, "test.local-test-repo-path not passed to tests");
File file = new File(property);
assertTrue("Expected " + property + " to exist, but it did not!", file.exists());
return file.getAbsolutePath();
}
}

View File

@ -0,0 +1,78 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import java.io.IOException;
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;
final class MLRequestConverters {
private MLRequestConverters() {}
static Request putJob(PutJobRequest putJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(putJobRequest.getJob().getId())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request openJob(OpenJobRequest openJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(openJobRequest.getJobId())
.addPathPartAsIs("_open")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setJsonEntity(openJobRequest.toString());
return request;
}
static Request deleteJob(DeleteJobRequest deleteJobRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(deleteJobRequest.getJobId())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
params.putParam("force", Boolean.toString(deleteJobRequest.isForce()));
return request;
}
}

View File

@ -57,7 +57,7 @@ public final class MachineLearningClient {
*/
public PutJobResponse putJob(PutJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
RequestConverters::putMachineLearningJob,
MLRequestConverters::putJob,
options,
PutJobResponse::fromXContent,
Collections.emptySet());
@ -75,7 +75,7 @@ public final class MachineLearningClient {
*/
public void putJobAsync(PutJobRequest request, RequestOptions options, ActionListener<PutJobResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
RequestConverters::putMachineLearningJob,
MLRequestConverters::putJob,
options,
PutJobResponse::fromXContent,
listener,
@ -95,7 +95,7 @@ public final class MachineLearningClient {
*/
public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
RequestConverters::deleteMachineLearningJob,
MLRequestConverters::deleteJob,
options,
DeleteJobResponse::fromXContent,
Collections.emptySet());
@ -113,7 +113,7 @@ public final class MachineLearningClient {
*/
public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<DeleteJobResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
RequestConverters::deleteMachineLearningJob,
MLRequestConverters::deleteJob,
options,
DeleteJobResponse::fromXContent,
listener,
@ -138,7 +138,7 @@ public final class MachineLearningClient {
*/
public OpenJobResponse openJob(OpenJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
RequestConverters::machineLearningOpenJob,
MLRequestConverters::openJob,
options,
OpenJobResponse::fromXContent,
Collections.emptySet());
@ -160,7 +160,7 @@ public final class MachineLearningClient {
*/
public void openJobAsync(OpenJobRequest request, RequestOptions options, ActionListener<OpenJobResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
RequestConverters::machineLearningOpenJob,
MLRequestConverters::openJob,
options,
OpenJobResponse::fromXContent,
listener,
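
For reference, a minimal usage sketch (not part of this commit) of how the rewired MachineLearningClient methods are meant to be called through the high-level REST client. The job configuration mirrors createValidJob() in MLRequestConvertersTests below; the class name, the job id, and the assumption that the response classes sit next to the request classes in org.elasticsearch.protocol.xpack.ml are illustrative only.

import java.io.IOException;
import java.util.Collections;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobResponse;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobResponse;
import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig;
import org.elasticsearch.protocol.xpack.ml.job.config.Detector;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;

public class MlClientUsageSketch {
    static void runJobLifecycle(RestHighLevelClient client) throws IOException {
        // Build a minimal job, mirroring createValidJob() in MLRequestConvertersTests.
        AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(
                Collections.singletonList(Detector.builder().setFunction("count").build()));
        Job.Builder jobBuilder = Job.builder("example-job");
        jobBuilder.setAnalysisConfig(analysisConfig);

        // Each call below is now routed through the MLRequestConverters methods added in this commit.
        PutJobResponse created = client.machineLearning().putJob(new PutJobRequest(jobBuilder.build()), RequestOptions.DEFAULT);
        OpenJobResponse opened = client.machineLearning().openJob(new OpenJobRequest("example-job"), RequestOptions.DEFAULT);
        DeleteJobResponse deleted = client.machineLearning().deleteJob(new DeleteJobRequest("example-job"), RequestOptions.DEFAULT);
    }
}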

View File

@ -116,9 +116,6 @@ import org.elasticsearch.protocol.xpack.indexlifecycle.StopILMRequest;
import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.rest.action.search.RestSearchAction;
@ -1256,46 +1253,6 @@ final class RequestConverters {
return request;
}
static Request putMachineLearningJob(PutJobRequest putJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(putJobRequest.getJob().getId())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request deleteMachineLearningJob(DeleteJobRequest deleteJobRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(deleteJobRequest.getJobId())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
Params params = new Params(request);
params.putParam("force", Boolean.toString(deleteJobRequest.isForce()));
return request;
}
static Request machineLearningOpenJob(OpenJobRequest openJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(openJobRequest.getJobId())
.addPathPartAsIs("_open")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setJsonEntity(openJobRequest.toString());
return request;
}
static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
EndpointBuilder endpointBuilder = new EndpointBuilder()
.addPathPartAsIs("_xpack/migration/assistance")
@ -1307,7 +1264,7 @@ final class RequestConverters {
return request;
}
private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
}

View File

@ -0,0 +1,90 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPost;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig;
import org.elasticsearch.protocol.xpack.ml.job.config.Detector;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import org.elasticsearch.test.ESTestCase;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Collections;
import static org.hamcrest.Matchers.equalTo;
public class MLRequestConvertersTests extends ESTestCase {
public void testPutJob() throws IOException {
Job job = createValidJob("foo");
PutJobRequest putJobRequest = new PutJobRequest(job);
Request request = MLRequestConverters.putJob(putJobRequest);
assertThat(request.getEndpoint(), equalTo("/_xpack/ml/anomaly_detectors/foo"));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
Job parsedJob = Job.PARSER.apply(parser, null).build();
assertThat(parsedJob, equalTo(job));
}
}
public void testOpenJob() throws Exception {
String jobId = "some-job-id";
OpenJobRequest openJobRequest = new OpenJobRequest(jobId);
openJobRequest.setTimeout(TimeValue.timeValueMinutes(10));
Request request = MLRequestConverters.openJob(openJobRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint());
ByteArrayOutputStream bos = new ByteArrayOutputStream();
request.getEntity().writeTo(bos);
assertEquals(bos.toString("UTF-8"), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}");
}
public void testDeleteJob() {
String jobId = randomAlphaOfLength(10);
DeleteJobRequest deleteJobRequest = new DeleteJobRequest(jobId);
Request request = MLRequestConverters.deleteJob(deleteJobRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint());
assertEquals(Boolean.toString(false), request.getParameters().get("force"));
deleteJobRequest.setForce(true);
request = MLRequestConverters.deleteJob(deleteJobRequest);
assertEquals(Boolean.toString(true), request.getParameters().get("force"));
}
private static Job createValidJob(String jobId) {
AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
Detector.builder().setFunction("count").build()));
Job.Builder jobBuilder = Job.builder(jobId);
jobBuilder.setAnalysisConfig(analysisConfig);
return jobBuilder.build();
}
}

View File

@ -131,8 +131,6 @@ import org.elasticsearch.protocol.xpack.indexlifecycle.SetIndexLifecyclePolicyRe
import org.elasticsearch.protocol.xpack.indexlifecycle.StartILMRequest;
import org.elasticsearch.protocol.xpack.indexlifecycle.StopILMRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.repositories.fs.FsRepository;
@ -2674,33 +2672,6 @@ public class RequestConvertersTests extends ESTestCase {
assertThat(request.getEntity(), nullValue());
}
public void testDeleteMachineLearningJob() {
String jobId = randomAlphaOfLength(10);
DeleteJobRequest deleteJobRequest = new DeleteJobRequest(jobId);
Request request = RequestConverters.deleteMachineLearningJob(deleteJobRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint());
assertEquals(Boolean.toString(false), request.getParameters().get("force"));
deleteJobRequest.setForce(true);
request = RequestConverters.deleteMachineLearningJob(deleteJobRequest);
assertEquals(Boolean.toString(true), request.getParameters().get("force"));
}
public void testPostMachineLearningOpenJob() throws Exception {
String jobId = "some-job-id";
OpenJobRequest openJobRequest = new OpenJobRequest(jobId);
openJobRequest.setTimeout(TimeValue.timeValueMinutes(10));
Request request = RequestConverters.machineLearningOpenJob(openJobRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint());
ByteArrayOutputStream bos = new ByteArrayOutputStream();
request.getEntity().writeTo(bos);
assertEquals(bos.toString("UTF-8"), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}");
}
/**
* Randomize the {@link FetchSourceContext} request parameters.
*/

View File

@ -41,6 +41,9 @@ integTestCluster {
// TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults
systemProperty 'es.scripting.use_java_time', 'false'
systemProperty 'es.scripting.update.ctx_in_params', 'false'
// TODO: remove this deprecation compatibility setting for 7.0
systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false'
}
// remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed
@ -400,25 +403,25 @@ buildRestTests.setups['stored_scripted_metric_script'] = '''
- do:
put_script:
id: "my_init_script"
body: { "script": { "lang": "painless", "source": "params._agg.transactions = []" } }
body: { "script": { "lang": "painless", "source": "state.transactions = []" } }
- match: { acknowledged: true }
- do:
put_script:
id: "my_map_script"
body: { "script": { "lang": "painless", "source": "params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)" } }
body: { "script": { "lang": "painless", "source": "state.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)" } }
- match: { acknowledged: true }
- do:
put_script:
id: "my_combine_script"
body: { "script": { "lang": "painless", "source": "double profit = 0;for (t in params._agg.transactions) { profit += t; } return profit" } }
body: { "script": { "lang": "painless", "source": "double profit = 0;for (t in state.transactions) { profit += t; } return profit" } }
- match: { acknowledged: true }
- do:
put_script:
id: "my_reduce_script"
body: { "script": { "lang": "painless", "source": "double profit = 0;for (a in params._aggs) { profit += a; } return profit" } }
body: { "script": { "lang": "painless", "source": "double profit = 0;for (a in states) { profit += a; } return profit" } }
- match: { acknowledged: true }
'''
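
The setup scripts above illustrate the migration from params._agg/params._aggs to the new state/states variables. Below is a hedged Java sketch of the same aggregation built with ScriptedMetricAggregationBuilder, assuming the 6.x package location of that class; the aggregation name is illustrative.

import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;

public class ScriptedMetricStateSketch {
    static ScriptedMetricAggregationBuilder profitAggregation() {
        // Same scripts as the doc setup above, written against the new state/states
        // variables instead of the deprecated params._agg/params._aggs.
        return AggregationBuilders.scriptedMetric("profit")
                .initScript(new Script("state.transactions = []"))
                .mapScript(new Script(
                        "state.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)"))
                .combineScript(new Script("double profit = 0; for (t in state.transactions) { profit += t; } return profit"))
                .reduceScript(new Script("double profit = 0; for (a in states) { profit += a; } return profit"));
    }
}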

View File

@ -47,7 +47,7 @@ POST test/_doc/1/_update
// TEST[continued]
We can add a tag to the list of tags (note, if the tag exists, it
will still add it, since its a list):
will still add it, since it's a list):
[source,js]
--------------------------------------------------
@ -65,6 +65,28 @@ POST test/_doc/1/_update
// CONSOLE
// TEST[continued]
We can remove a tag from the list of tags. Note that the Painless function to
`remove` a tag takes as its parameter the array index of the element you wish
to remove, so you need a bit more logic to locate it while avoiding a runtime
error. Note that if the tag was present more than once in the list, this will
remove only one occurrence of it:
[source,js]
--------------------------------------------------
POST test/_doc/1/_update
{
"script" : {
"source": "if (ctx._source.tags.contains(params.tag)) { ctx._source.tags.remove(ctx._source.tags.indexOf(params.tag)) }",
"lang": "painless",
"params" : {
"tag" : "blue"
}
}
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
In addition to `_source`, the following variables are available through
the `ctx` map: `_index`, `_type`, `_id`, `_version`, `_routing`
and `_now` (the current timestamp).
@ -172,7 +194,7 @@ the request was ignored.
"_index": "test",
"_type": "_doc",
"_id": "1",
"_version": 6,
"_version": 7,
"result": "noop"
}
--------------------------------------------------

View File

@ -93,7 +93,8 @@ Replication is important for two primary reasons:
To summarize, each index can be split into multiple shards. An index can also be replicated zero (meaning no replicas) or more times. Once replicated, each index will have primary shards (the original shards that were replicated from) and replica shards (the copies of the primary shards).
The number of shards and replicas can be defined per index at the time the index is created. After the index is created, you may change the number of replicas dynamically anytime but you cannot change the number of shards after-the-fact.
The number of shards and replicas can be defined per index at the time the index is created. After the index is created, you may also change the number of replicas dynamically anytime. You can change the number of shards for an existing index using the {ref}/indices-shrink-index.html[`_shrink`] and {ref}/indices-split-index.html[`_split`] APIs, however this is not a trivial task and pre-planning for the correct number of shards is the optimal approach.
By default, each index in Elasticsearch is allocated one primary shard and one replica which means that if you have at least two nodes in your cluster, your index will have one primary shard and another replica shard (one complete replica) for a total of two shards per index.

View File

@ -92,6 +92,9 @@ deprecated in 6.x, has been removed. Context enabled suggestion queries
without contexts have to visit every suggestion, which degrades the search performance
considerably.
For geo context the value of the `path` parameter is now validated against the mapping,
and the context is only accepted if `path` points to a field with `geo_point` type.
==== Semantics changed for `max_concurrent_shard_requests`
`max_concurrent_shard_requests` used to limit the total number of concurrent shard

View File

@ -16,13 +16,14 @@
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.esplugin'
esplugin {
name 'custom-settings'
description 'An example plugin showing how to register custom settings'
classname 'org.elasticsearch.example.customsettings.ExampleCustomSettingsPlugin'
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt')
}
integTestCluster {

View File

@ -23,6 +23,8 @@ esplugin {
name 'custom-suggester'
description 'An example plugin showing how to write and register a custom suggester'
classname 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin'
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt')
}
integTestCluster {
@ -30,4 +32,4 @@ integTestCluster {
}
// this plugin has no unit tests, only rest tests
tasks.test.enabled = false

View File

@ -16,7 +16,6 @@
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.esplugin'
esplugin {
@ -24,10 +23,12 @@ esplugin {
description 'An example whitelisting additional classes and methods in painless'
classname 'org.elasticsearch.example.painlesswhitelist.MyWhitelistPlugin'
extendedPlugins = ['lang-painless']
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt')
}
dependencies {
compileOnly project(':modules:lang-painless')
compileOnly "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${versions.elasticsearch}"
}
if (System.getProperty('tests.distribution') == null) {

View File

@ -16,11 +16,13 @@
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.esplugin'
esplugin {
name 'example-rescore'
description 'An example plugin implementing rescore and verifying that plugins *can* implement rescore'
classname 'org.elasticsearch.example.rescore.ExampleRescorePlugin'
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt')
}

View File

@ -16,13 +16,14 @@
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.esplugin'
esplugin {
name 'rest-handler'
description 'An example plugin showing how to register a REST handler'
classname 'org.elasticsearch.example.resthandler.ExampleRestHandlerPlugin'
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt')
}
// No unit tests in this example
@ -40,4 +41,4 @@ integTestCluster {
}
integTestRunner {
systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }"
}

View File

@ -16,13 +16,15 @@
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.esplugin'
esplugin {
name 'script-expert-scoring'
description 'An example script engine to use low level Lucene internals for expert scoring'
classname 'org.elasticsearch.example.expertscript.ExpertScriptPlugin'
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt')
}
test.enabled = false

View File

@ -342,3 +342,15 @@ if (isEclipse == false || project.path == ":server-tests") {
integTest.mustRunAfter test
}
// TODO: remove these compatibility tests in 7.0
additionalTest('testScriptedMetricAggParamsV6Compatibility') {
include '**/ScriptedMetricAggregatorAggStateV6CompatTests.class'
include '**/InternalScriptedMetricAggStateV6CompatTests.class'
systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'true'
}
test {
// these are tested explicitly in separate test tasks
exclude '**/ScriptedMetricAggregatorAggStateV6CompatTests.class'
exclude '**/InternalScriptedMetricAggStateV6CompatTests.class'
}

View File

@ -52,6 +52,7 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.InvalidTypeNameException;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import java.io.Closeable;
import java.io.IOException;
@ -421,6 +422,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
MapperMergeValidator.validateFieldReferences(fieldMappers, fieldAliasMappers,
fullPathObjectMappers, fieldTypes);
ContextMapping.validateContextPaths(indexSettings.getIndexVersionCreated(), fieldMappers, fieldTypes::get);
if (reason == MergeReason.MAPPING_UPDATE) {
// this check will only be performed on the master node when there is
// a call to the update mapping API. For all other cases like

View File

@ -22,6 +22,8 @@ package org.elasticsearch.script;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
@ -31,6 +33,25 @@ import java.util.List;
import java.util.Map;
public class ScriptedMetricAggContexts {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(ScriptedMetricAggContexts.class));
// Public for access from tests
public static final String AGG_PARAM_DEPRECATION_WARNING =
"params._agg/_aggs for scripted metric aggregations are deprecated, use state/states (not in params) instead. " +
"Use -Des.aggregations.enable_scripted_metric_agg_param=false to disable.";
public static boolean deprecatedAggParamEnabled() {
boolean enabled = Boolean.parseBoolean(
System.getProperty("es.aggregations.enable_scripted_metric_agg_param", "true"));
if (enabled) {
DEPRECATION_LOGGER.deprecatedAndMaybeLog("enable_scripted_metric_agg_param", AGG_PARAM_DEPRECATION_WARNING);
}
return enabled;
}
private abstract static class ParamsAndStateBase {
private final Map<String, Object> params;
private final Object state;

View File

@ -209,7 +209,10 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
}
}
if (changed) {
return new FiltersAggregationBuilder(getName(), rewrittenFilters, this.keyed);
FiltersAggregationBuilder rewritten = new FiltersAggregationBuilder(getName(), rewrittenFilters, this.keyed);
rewritten.otherBucket(otherBucket);
rewritten.otherBucketKey(otherBucketKey);
return rewritten;
} else {
return this;
}
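
The fix above makes the rewrite preserve the otherBucket and otherBucketKey settings. Below is a small illustrative Java sketch (package locations assumed from the 6.x codebase, names hypothetical) of a builder that relies on those settings surviving rewrite.

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter;

public class FiltersOtherBucketSketch {
    static FiltersAggregationBuilder messagesByLevel() {
        // otherBucket/otherBucketKey are exactly the settings the corrected rewrite now carries over.
        return new FiltersAggregationBuilder("messages",
                new KeyedFilter("errors", QueryBuilders.termQuery("level", "error")),
                new KeyedFilter("warnings", QueryBuilders.termQuery("level", "warning")))
                .otherBucket(true)
                .otherBucketKey("other_messages");
    }
}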

View File

@ -156,7 +156,7 @@ public class AutoDateHistogramAggregationBuilder
return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData);
}
private static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) {
static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) {
Rounding.Builder tzRoundingBuilder = Rounding.builder(interval);
if (timeZone != null) {
tzRoundingBuilder.timeZone(timeZone);

View File

@ -418,7 +418,7 @@ public final class InternalAutoDateHistogram extends
return currentResult;
}
int roundingIdx = getAppropriateRounding(list.get(0).key, list.get(list.size() - 1).key, currentResult.roundingIdx,
bucketInfo.roundingInfos);
bucketInfo.roundingInfos, targetBuckets);
RoundingInfo roundingInfo = bucketInfo.roundingInfos[roundingIdx];
Rounding rounding = roundingInfo.rounding;
// merge buckets using the new rounding
@ -447,8 +447,8 @@ public final class InternalAutoDateHistogram extends
return new BucketReduceResult(list, roundingInfo, roundingIdx);
}
private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx,
static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx,
RoundingInfo[] roundings) {
RoundingInfo[] roundings, int targetBuckets) {
if (roundingIdx == roundings.length - 1) {
return roundingIdx;
}
@ -480,7 +480,7 @@ public final class InternalAutoDateHistogram extends
currentKey = currentRounding.nextRoundingValue(currentKey);
}
currentRoundingIdx++;
} while (requiredBuckets > (targetBuckets * roundings[roundingIdx].getMaximumInnerInterval())
} while (requiredBuckets > (targetBuckets * roundings[currentRoundingIdx - 1].getMaximumInnerInterval())
&& currentRoundingIdx < roundings.length);
// The loop will increase past the correct rounding index here so we
// need to subtract one to get the rounding index we need

View File

@ -96,7 +96,9 @@ public class InternalScriptedMetric extends InternalAggregation implements Scrip
}
// Add _aggs to params map for backwards compatibility (redundant with a context variable on the ReduceScript created below).
params.put("_aggs", aggregationObjects);
if (ScriptedMetricAggContexts.deprecatedAggParamEnabled()) {
params.put("_aggs", aggregationObjects);
}
ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService().compile(
firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT);

View File

@ -83,10 +83,17 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory<ScriptedM
// Add _agg to params map for backwards compatibility (redundant with context variables on the scripts created below).
// When this is removed, aggState (as passed to ScriptedMetricAggregator) can be changed to Map<String, Object>, since
// it won't be possible to completely replace it with another type as is possible when it's an entry in params.
if (aggParams.containsKey("_agg") == false) {
aggParams.put("_agg", new HashMap<String, Object>());
Object aggState = new HashMap<String, Object>();
if (ScriptedMetricAggContexts.deprecatedAggParamEnabled()) {
if (aggParams.containsKey("_agg") == false) {
// Add _agg if it wasn't added manually
aggParams.put("_agg", aggState);
} else {
// If it was added manually, also use it for the agg context variable to reduce the likelihood of
// weird behavior due to multiple different variables.
aggState = aggParams.get("_agg");
}
}
Object aggState = aggParams.get("_agg");
final ScriptedMetricAggContexts.InitScript initScript = this.initScript.newInstance(
mergeParams(aggParams, initScriptParams), aggState);

View File

@ -220,7 +220,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
/**
* Set encoder for the highlighting
* are {@code styled} and {@code default}.
* are {@code html} and {@code default}.
*
* @param encoder name
*/
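
The corrected javadoc names html and default as the supported encoder values. A brief hedged sketch of setting the encoder when building a highlighted search source (the field name is illustrative):

import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;

public class HighlightEncoderSketch {
    static SearchSourceBuilder highlightedSource() {
        // "html" escapes the highlighted snippets; "default" leaves them unescaped.
        HighlightBuilder highlight = new HighlightBuilder()
                .field("content")
                .encoder("html");
        return new SearchSourceBuilder().highlighter(highlight);
    }
}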

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search.suggest.completion.context;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentFragment;
@ -28,6 +29,8 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ParseContext;
import java.io.IOException;
@ -35,6 +38,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
/**
* A {@link ContextMapping} defines criteria that can be used to
@ -131,6 +135,31 @@ public abstract class ContextMapping<T extends ToXContent> implements ToXContent
*/
protected abstract XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException;
/**
* Checks if the current context is consistent with the rest of the fields. For example, the GeoContext
* should check that the field that it points to has the correct type.
*/
protected void validateReferences(Version indexVersionCreated, Function<String, MappedFieldType> fieldResolver) {
// No validation is required by default
}
/**
* Verifies that all field paths specified in contexts point to the fields with correct mappings
*/
public static void validateContextPaths(Version indexVersionCreated, List<FieldMapper> fieldMappers,
Function<String, MappedFieldType> fieldResolver) {
for (FieldMapper fieldMapper : fieldMappers) {
if (CompletionFieldMapper.CONTENT_TYPE.equals(fieldMapper.typeName())) {
CompletionFieldMapper.CompletionFieldType fieldType = ((CompletionFieldMapper) fieldMapper).fieldType();
if (fieldType.hasContextMappings()) {
for (ContextMapping context : fieldType.getContextMappings()) {
context.validateReferences(indexVersionCreated, fieldResolver);
}
}
}
}
}
@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(FIELD_NAME, name);

View File

@ -37,6 +37,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -50,7 +51,7 @@ import static org.elasticsearch.search.suggest.completion.context.ContextMapping
* and creates context queries for defined {@link ContextMapping}s
* for a {@link CompletionFieldMapper}
*/
public class ContextMappings implements ToXContent {
public class ContextMappings implements ToXContent, Iterable<ContextMapping<?>> {
private final List<ContextMapping<?>> contextMappings;
private final Map<String, ContextMapping<?>> contextNameMap;
@ -97,6 +98,11 @@ public class ContextMappings implements ToXContent {
document.add(new TypedContextField(name, input, weight, contexts, document)); document.add(new TypedContextField(name, input, weight, contexts, document));
} }
@Override
public Iterator<ContextMapping<?>> iterator() {
return contextMappings.iterator();
}
/** /**
* Field prepends context values with a suggestion * Field prepends context values with a suggestion
* Context values are associated with a type, denoted by * Context values are associated with a type, denoted by
View File
@ -19,12 +19,17 @@
package org.elasticsearch.search.suggest.completion.context; package org.elasticsearch.search.suggest.completion.context;
import org.apache.logging.log4j.LogManager;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.document.StringField; import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
@ -42,6 +47,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.elasticsearch.common.geo.GeoHashUtils.addNeighbors; import static org.elasticsearch.common.geo.GeoHashUtils.addNeighbors;
@ -69,6 +75,8 @@ public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
static final String CONTEXT_PRECISION = "precision"; static final String CONTEXT_PRECISION = "precision";
static final String CONTEXT_NEIGHBOURS = "neighbours"; static final String CONTEXT_NEIGHBOURS = "neighbours";
private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(GeoContextMapping.class));
private final int precision; private final int precision;
private final String fieldName; private final String fieldName;
@ -205,11 +213,11 @@ public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
for (IndexableField field : fields) { for (IndexableField field : fields) {
if (field instanceof StringField) { if (field instanceof StringField) {
spare.resetFromString(field.stringValue()); spare.resetFromString(field.stringValue());
} else { geohashes.add(spare.geohash());
// todo return this to .stringValue() once LatLonPoint implements it } else if (field instanceof LatLonPoint || field instanceof LatLonDocValuesField) {
spare.resetFromIndexableField(field); spare.resetFromIndexableField(field);
geohashes.add(spare.geohash());
} }
geohashes.add(spare.geohash());
} }
} }
} }
@ -279,6 +287,32 @@ public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
return internalQueryContextList; return internalQueryContextList;
} }
@Override
protected void validateReferences(Version indexVersionCreated, Function<String, MappedFieldType> fieldResolver) {
if (fieldName != null) {
MappedFieldType mappedFieldType = fieldResolver.apply(fieldName);
if (mappedFieldType == null) {
if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) {
DEPRECATION_LOGGER.deprecatedAndMaybeLog("geo_context_mapping",
"field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name);
} else {
throw new ElasticsearchParseException(
"field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name);
}
} else if (GeoPointFieldMapper.CONTENT_TYPE.equals(mappedFieldType.typeName()) == false) {
if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) {
DEPRECATION_LOGGER.deprecatedAndMaybeLog("geo_context_mapping",
"field [{}] referenced in context [{}] must be mapped to geo_point, found [{}]",
fieldName, name, mappedFieldType.typeName());
} else {
throw new ElasticsearchParseException(
"field [{}] referenced in context [{}] must be mapped to geo_point, found [{}]",
fieldName, name, mappedFieldType.typeName());
}
}
}
}
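The behaviour added here is easier to see in isolation: a context that names a field resolves it through the supplied resolver and either logs a deprecation warning (indices created before 7.0) or fails hard (7.0 and later). Below is a minimal, self-contained sketch of that pattern; FieldType and the printed deprecation message are plain stand-ins and do not reproduce the Elasticsearch types.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

public class GeoContextPathValidationSketch {

    /** Simplified stand-in for MappedFieldType. */
    static final class FieldType {
        final String typeName;
        FieldType(String typeName) { this.typeName = typeName; }
    }

    static void validate(String contextName, String fieldName, boolean strictIndex,
                         Function<String, FieldType> fieldResolver) {
        FieldType resolved = fieldResolver.apply(fieldName);
        String problem = null;
        if (resolved == null) {
            problem = "field [" + fieldName + "] referenced in context [" + contextName
                + "] is not defined in the mapping";
        } else if ("geo_point".equals(resolved.typeName) == false) {
            problem = "field [" + fieldName + "] referenced in context [" + contextName
                + "] must be mapped to geo_point, found [" + resolved.typeName + "]";
        }
        if (problem != null) {
            if (strictIndex) {
                throw new IllegalArgumentException(problem);    // 7.0+ indices: reject the mapping
            } else {
                System.err.println("DEPRECATION: " + problem);  // older indices: warn only
            }
        }
    }

    public static void main(String[] args) {
        Map<String, FieldType> mapping = new HashMap<>();
        mapping.put("pin", new FieldType("keyword"));         // wrong type on purpose
        validate("location", "pin", false, mapping::get);     // pre-7.0 index: deprecation warning
        validate("location", "pin", true, mapping::get);      // 7.0+ index: throws
    }
}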
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;
View File
@ -69,7 +69,7 @@ public class DateTimeUnitTests extends ESTestCase {
public void testConversion() { public void testConversion() {
long millis = randomLongBetween(0, Instant.now().toEpochMilli()); long millis = randomLongBetween(0, Instant.now().toEpochMilli());
DateTimeZone zone = randomDateTimeZone(); DateTimeZone zone = randomDateTimeZone();
ZoneId zoneId = ZoneId.of(zone.getID()); ZoneId zoneId = zone.toTimeZone().toZoneId();
int offsetSeconds = zoneId.getRules().getOffset(Instant.ofEpochMilli(millis)).getTotalSeconds(); int offsetSeconds = zoneId.getRules().getOffset(Instant.ofEpochMilli(millis)).getTotalSeconds();
long parsedMillisJavaTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), zoneId) long parsedMillisJavaTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), zoneId)
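The conversion route matters here presumably because a randomly chosen Joda DateTimeZone ID is not guaranteed to be something java.time recognizes; going through java.util.TimeZone is the more forgiving path, since TimeZone.getTimeZone falls back to GMT for unknown IDs instead of throwing the way ZoneId.of does. A small standalone illustration (Joda-Time on the classpath is assumed):

import java.time.ZoneId;
import java.util.TimeZone;
import org.joda.time.DateTimeZone;

public class ZoneConversionDemo {
    public static void main(String[] args) {
        DateTimeZone joda = DateTimeZone.forID("Europe/Paris");

        // Route used before the change: parse the Joda ID directly.
        ZoneId direct = ZoneId.of(joda.getID());

        // Route used after the change: go through java.util.TimeZone.
        ZoneId viaTimeZone = joda.toTimeZone().toZoneId();

        System.out.println(direct + " / " + viaTimeZone);                 // Europe/Paris / Europe/Paris

        // Failure modes differ for IDs java.time does not know:
        System.out.println(TimeZone.getTimeZone("Not/AZone").toZoneId()); // prints GMT (silent fallback)
        // ZoneId.of("Not/AZone") would throw java.time.zone.ZoneRulesException instead
    }
}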
View File
@ -2391,8 +2391,7 @@ public class IndexShardTests extends IndexShardTestCase {
closeShards(sourceShard, targetShard); closeShards(sourceShard, targetShard);
} }
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32766") public void testDocStats() throws IOException, InterruptedException {
public void testDocStats() throws IOException {
IndexShard indexShard = null; IndexShard indexShard = null;
try { try {
indexShard = newStartedShard(); indexShard = newStartedShard();
@ -2441,8 +2440,6 @@ public class IndexShardTests extends IndexShardTestCase {
assertTrue(searcher.reader().numDocs() <= docStats.getCount()); assertTrue(searcher.reader().numDocs() <= docStats.getCount());
} }
assertThat(docStats.getCount(), equalTo(numDocs)); assertThat(docStats.getCount(), equalTo(numDocs));
// Lucene will delete a segment if all docs are deleted from it; this means that we lose the deletes when deleting all docs
assertThat(docStats.getDeleted(), equalTo(numDocsToDelete == numDocs ? 0 : numDocsToDelete));
} }
// merge them away // merge them away
View File
@ -43,7 +43,6 @@ import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.translog.SnapshotMatchers; import org.elasticsearch.index.translog.SnapshotMatchers;
import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.test.junit.annotations.TestLogging;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@ -74,7 +73,7 @@ public class RecoveryTests extends ESIndexLevelReplicationTestCase {
} }
} }
@TestLogging("_root:TRACE") @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32089")
public void testRetentionPolicyChangeDuringRecovery() throws Exception { public void testRetentionPolicyChangeDuringRecovery() throws Exception {
try (ReplicationGroup shards = createGroup(0)) { try (ReplicationGroup shards = createGroup(0)) {
shards.startPrimary(); shards.startPrimary();
View File
@ -178,4 +178,18 @@ public class FiltersTests extends BaseAggregationTestCase<FiltersAggregationBuil
assertSame(rewritten, assertSame(rewritten,
rewritten.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L))); rewritten.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)));
} }
public void testRewritePreservesOtherBucket() throws IOException {
FiltersAggregationBuilder originalFilters = new FiltersAggregationBuilder("my-agg", new BoolQueryBuilder());
originalFilters.otherBucket(randomBoolean());
originalFilters.otherBucketKey(randomAlphaOfLength(10));
AggregationBuilder rewritten = originalFilters.rewrite(new QueryRewriteContext(xContentRegistry(),
null, null, () -> 0L));
assertThat(rewritten, instanceOf(FiltersAggregationBuilder.class));
FiltersAggregationBuilder rewrittenFilters = (FiltersAggregationBuilder) rewritten;
assertEquals(originalFilters.otherBucket(), rewrittenFilters.otherBucket());
assertEquals(originalFilters.otherBucketKey(), rewrittenFilters.otherBucketKey());
}
} }
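otherBucket and otherBucketKey are ordinary builder options, so the property protected by the new test is simply that rewriting the aggregation keeps them. A hedged usage sketch; the aggregation name, query, and field values are invented, and only public AggregationBuilders/QueryBuilders factory calls are used:

import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filters;

import org.elasticsearch.search.aggregations.AggregationBuilder;

public class FiltersOtherBucketExample {
    // Bucket log messages into "errors" and "warnings", and collect everything
    // else under an explicit "other_messages" bucket; rewriting this builder
    // must not lose the two other-bucket settings.
    static AggregationBuilder messagesAggregation() {
        return filters("messages", matchQuery("body", "error"), matchQuery("body", "warning"))
            .otherBucket(true)
            .otherBucketKey("other_messages");
    }
}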
View File
@ -20,6 +20,7 @@
package org.elasticsearch.search.aggregations.bucket.histogram; package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
@ -28,7 +29,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHi
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
@ -39,6 +44,8 @@ import java.util.TreeMap;
import static org.elasticsearch.common.unit.TimeValue.timeValueHours; import static org.elasticsearch.common.unit.TimeValue.timeValueHours;
import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.createRounding;
import static org.hamcrest.Matchers.equalTo;
public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> { public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
@ -61,6 +68,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
int nbBuckets = randomNumberOfBuckets(); int nbBuckets = randomNumberOfBuckets();
int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1); int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1);
List<InternalAutoDateHistogram.Bucket> buckets = new ArrayList<>(nbBuckets); List<InternalAutoDateHistogram.Bucket> buckets = new ArrayList<>(nbBuckets);
long startingDate = System.currentTimeMillis(); long startingDate = System.currentTimeMillis();
long interval = randomIntBetween(1, 3); long interval = randomIntBetween(1, 3);
@ -72,23 +80,41 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
} }
InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList()); InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList());
BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations); BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData); return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData);
} }
/*
This test was added to reproduce a bug where getAppropriateRounding was only ever using the first innerIntervals
passed in, instead of using the interval associated with the loop.
*/
public void testGetAppropriateRoundingUsesCorrectIntervals() {
RoundingInfo[] roundings = new RoundingInfo[6];
DateTimeZone timeZone = DateTimeZone.UTC;
// Since we pass 0 as the starting index to getAppropriateRounding, we'll also use
// an innerInterval that is quite large, such that targetBuckets * roundings[i].getMaximumInnerInterval()
// will be larger than the estimate.
roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
1000L, 1000);
roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
60 * 1000L, 1, 5, 10, 30);
roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
60 * 60 * 1000L, 1, 3, 12);
OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC);
// We want to pass a roundingIdx of zero, because in order to reproduce this bug, we need the function
// to increment the rounding (because the bug was that the function would not use the innerIntervals
// from the new rounding).
int result = InternalAutoDateHistogram.getAppropriateRounding(timestamp.toEpochSecond()*1000,
timestamp.plusDays(1).toEpochSecond()*1000, 0, roundings, 25);
assertThat(result, equalTo(2));
}
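A quick back-of-the-envelope check of why index 2 is the expected answer, assuming the selection rule is "advance to the first rounding whose estimated bucket count fits within targetBuckets times its own largest inner interval" (that rule is inferred from the test setup, not restated from the implementation):

public class RoundingChoiceArithmetic {
    public static void main(String[] args) {
        long oneDayMillis = 24L * 60 * 60 * 1000;        // 86,400,000 ms
        System.out.println(oneDayMillis / 1_000L);       // 86,400  > 25 * 1000 = 25,000 -> seconds rejected
        System.out.println(oneDayMillis / 60_000L);      //  1,440  > 25 * 30   = 750    -> minutes rejected
        System.out.println(oneDayMillis / 3_600_000L);   //     24 <= 25 * 12   = 300    -> hours accepted (index 2)
        // Under the old bug, the minute rounding would already have been accepted,
        // because the seconds rounding's inner interval of 1000 was reused: 1,440 <= 25 * 1000.
    }
}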
@Override @Override
protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) { protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) {
int roundingIdx = 0;
for (InternalAutoDateHistogram histogram : inputs) {
if (histogram.getBucketInfo().roundingIdx > roundingIdx) {
roundingIdx = histogram.getBucketInfo().roundingIdx;
}
}
RoundingInfo roundingInfo = roundingInfos[roundingIdx];
long lowest = Long.MAX_VALUE; long lowest = Long.MAX_VALUE;
long highest = 0; long highest = 0;
for (InternalAutoDateHistogram histogram : inputs) { for (InternalAutoDateHistogram histogram : inputs) {
for (Histogram.Bucket bucket : histogram.getBuckets()) { for (Histogram.Bucket bucket : histogram.getBuckets()) {
long bucketKey = ((DateTime) bucket.getKey()).getMillis(); long bucketKey = ((DateTime) bucket.getKey()).getMillis();
@ -100,35 +126,72 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
} }
} }
} }
int roundingIndex = reduced.getBucketInfo().roundingIdx;
RoundingInfo roundingInfo = roundingInfos[roundingIndex];
long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis(); long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis();
long innerIntervalToUse = 0; long innerIntervalToUse = roundingInfo.innerIntervals[0];
for (int interval : roundingInfo.innerIntervals) { int innerIntervalIndex = 0;
if (normalizedDuration / interval < maxNumberOfBuckets()) {
innerIntervalToUse = interval; // First, try to calculate the correct innerInterval using the normalizedDuration.
// This handles cases where highest and lowest are further apart than the interval being used.
if (normalizedDuration != 0) {
for (int j = roundingInfo.innerIntervals.length-1; j >= 0; j--) {
int interval = roundingInfo.innerIntervals[j];
if (normalizedDuration / interval < reduced.getBuckets().size()) {
innerIntervalToUse = interval;
innerIntervalIndex = j;
}
} }
} }
long intervalInMillis = innerIntervalToUse * roundingInfo.getRoughEstimateDurationMillis();
int bucketCount = getBucketCount(lowest, highest, roundingInfo, intervalInMillis);
//Next, if our bucketCount is still above what we need, we'll go back and determine the interval
// based on a size calculation.
if (bucketCount > reduced.getBuckets().size()) {
for (int i = innerIntervalIndex; i < roundingInfo.innerIntervals.length; i++) {
long newIntervalMillis = roundingInfo.innerIntervals[i] * roundingInfo.getRoughEstimateDurationMillis();
if (getBucketCount(lowest, highest, roundingInfo, newIntervalMillis) <= reduced.getBuckets().size()) {
innerIntervalToUse = roundingInfo.innerIntervals[i];
intervalInMillis = innerIntervalToUse * roundingInfo.getRoughEstimateDurationMillis();
}
}
}
Map<Long, Long> expectedCounts = new TreeMap<>(); Map<Long, Long> expectedCounts = new TreeMap<>();
long intervalInMillis = innerIntervalToUse*roundingInfo.getRoughEstimateDurationMillis();
for (long keyForBucket = roundingInfo.rounding.round(lowest); for (long keyForBucket = roundingInfo.rounding.round(lowest);
keyForBucket <= highest; keyForBucket <= roundingInfo.rounding.round(highest);
keyForBucket = keyForBucket + intervalInMillis) { keyForBucket = keyForBucket + intervalInMillis) {
expectedCounts.put(keyForBucket, 0L); expectedCounts.put(keyForBucket, 0L);
// Iterate through the input buckets, and for each bucket, determine if it's inside
// the range of the bucket in the outer loop. if it is, add the doc count to the total
// for that bucket.
for (InternalAutoDateHistogram histogram : inputs) { for (InternalAutoDateHistogram histogram : inputs) {
for (Histogram.Bucket bucket : histogram.getBuckets()) { for (Histogram.Bucket bucket : histogram.getBuckets()) {
long bucketKey = ((DateTime) bucket.getKey()).getMillis(); long roundedBucketKey = roundingInfo.rounding.round(((DateTime) bucket.getKey()).getMillis());
long roundedBucketKey = roundingInfo.rounding.round(bucketKey); long docCount = bucket.getDocCount();
if (roundedBucketKey >= keyForBucket if (roundedBucketKey >= keyForBucket
&& roundedBucketKey < keyForBucket + intervalInMillis) { && roundedBucketKey < keyForBucket + intervalInMillis) {
long count = bucket.getDocCount();
expectedCounts.compute(keyForBucket, expectedCounts.compute(keyForBucket,
(key, oldValue) -> (oldValue == null ? 0 : oldValue) + count); (key, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount);
} }
} }
} }
} }
// If there is only a single bucket, and we haven't added it above, add a bucket with no documents.
// this step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above.
if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) {
expectedCounts.put(roundingInfo.rounding.round(lowest), 0L);
}
// pick out the actual reduced values to make the assertion more readable
Map<Long, Long> actualCounts = new TreeMap<>(); Map<Long, Long> actualCounts = new TreeMap<>();
for (Histogram.Bucket bucket : reduced.getBuckets()) { for (Histogram.Bucket bucket : reduced.getBuckets()) {
actualCounts.compute(((DateTime) bucket.getKey()).getMillis(), actualCounts.compute(((DateTime) bucket.getKey()).getMillis(),
@ -137,12 +200,16 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
assertEquals(expectedCounts, actualCounts); assertEquals(expectedCounts, actualCounts);
} }
@Override private int getBucketCount(long lowest, long highest, RoundingInfo roundingInfo, long intervalInMillis) {
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215") int bucketCount = 0;
public void testReduceRandom() { for (long keyForBucket = roundingInfo.rounding.round(lowest);
super.testReduceRandom(); keyForBucket <= roundingInfo.rounding.round(highest);
keyForBucket = keyForBucket + intervalInMillis) {
bucketCount++;
}
return bucketCount;
} }
@Override @Override
protected Writeable.Reader<InternalAutoDateHistogram> instanceReader() { protected Writeable.Reader<InternalAutoDateHistogram> instanceReader() {
return InternalAutoDateHistogram::new; return InternalAutoDateHistogram::new;
View File
@ -67,6 +67,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.notNullValue;
@ -90,42 +91,57 @@ public class ScriptedMetricIT extends ESIntegTestCase {
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>(); Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
scripts.put("_agg['count'] = 1", vars -> scripts.put("state['count'] = 1", vars ->
aggScript(vars, agg -> ((Map<String, Object>) agg).put("count", 1))); aggScript(vars, state -> state.put("count", 1)));
scripts.put("_agg.add(1)", vars -> scripts.put("state.list.add(1)", vars ->
aggScript(vars, agg -> ((List) agg).add(1))); aggScript(vars, state -> {
// Lazily populate state.list for tests without an init script
if (state.containsKey("list") == false) {
state.put("list", new ArrayList());
}
scripts.put("_agg[param1] = param2", vars -> ((List) state.get("list")).add(1);
aggScript(vars, agg -> ((Map) agg).put(XContentMapValues.extractValue("params.param1", vars), }));
scripts.put("state[param1] = param2", vars ->
aggScript(vars, state -> state.put((String) XContentMapValues.extractValue("params.param1", vars),
XContentMapValues.extractValue("params.param2", vars)))); XContentMapValues.extractValue("params.param2", vars))));
scripts.put("vars.multiplier = 3", vars -> scripts.put("vars.multiplier = 3", vars ->
((Map<String, Object>) vars.get("vars")).put("multiplier", 3)); ((Map<String, Object>) vars.get("vars")).put("multiplier", 3));
scripts.put("_agg.add(vars.multiplier)", vars -> scripts.put("state.list.add(vars.multiplier)", vars ->
aggScript(vars, agg -> ((List) agg).add(XContentMapValues.extractValue("vars.multiplier", vars)))); aggScript(vars, state -> {
// Lazily populate state.list for tests without an init script
if (state.containsKey("list") == false) {
state.put("list", new ArrayList());
}
((List) state.get("list")).add(XContentMapValues.extractValue("vars.multiplier", vars));
}));
// Equivalent to: // Equivalent to:
// //
// newaggregation = []; // newaggregation = [];
// sum = 0; // sum = 0;
// //
// for (a in _agg) { // for (s in state.list) {
// sum += a // sum += s
// }; // };
// //
// newaggregation.add(sum); // newaggregation.add(sum);
// return newaggregation" // return newaggregation"
// //
scripts.put("sum agg values as a new aggregation", vars -> { scripts.put("sum state values as a new aggregation", vars -> {
List newAggregation = new ArrayList(); List newAggregation = new ArrayList();
List<?> agg = (List<?>) vars.get("_agg"); Map<String, Object> state = (Map<String, Object>) vars.get("state");
List<?> list = (List<?>) state.get("list");
if (agg != null) { if (list != null) {
Integer sum = 0; Integer sum = 0;
for (Object a : (List) agg) { for (Object s : list) {
sum += ((Number) a).intValue(); sum += ((Number) s).intValue();
} }
newAggregation.add(sum); newAggregation.add(sum);
} }
@ -137,24 +153,41 @@ public class ScriptedMetricIT extends ESIntegTestCase {
// newaggregation = []; // newaggregation = [];
// sum = 0; // sum = 0;
// //
// for (aggregation in _aggs) { // for (state in states) {
// for (a in aggregation) { // for (s in state) {
// sum += a // sum += s
// } // }
// }; // };
// //
// newaggregation.add(sum); // newaggregation.add(sum);
// return newaggregation" // return newaggregation"
// //
scripts.put("sum aggs of agg values as a new aggregation", vars -> { scripts.put("sum all states (lists) values as a new aggregation", vars -> {
List newAggregation = new ArrayList(); List newAggregation = new ArrayList();
Integer sum = 0; Integer sum = 0;
List<?> aggs = (List<?>) vars.get("_aggs"); List<List<?>> states = (List<List<?>>) vars.get("states");
for (Object aggregation : (List) aggs) { for (List<?> list : states) {
if (aggregation != null) { if (list != null) {
for (Object a : (List) aggregation) { for (Object s : list) {
sum += ((Number) a).intValue(); sum += ((Number) s).intValue();
}
}
}
newAggregation.add(sum);
return newAggregation;
});
scripts.put("sum all states' state.list values as a new aggregation", vars -> {
List newAggregation = new ArrayList();
Integer sum = 0;
List<Map<String, Object>> states = (List<Map<String, Object>>) vars.get("states");
for (Map<String, Object> state : states) {
List<?> list = (List<?>) state.get("list");
if (list != null) {
for (Object s : list) {
sum += ((Number) s).intValue();
} }
} }
} }
@ -167,25 +200,25 @@ public class ScriptedMetricIT extends ESIntegTestCase {
// newaggregation = []; // newaggregation = [];
// sum = 0; // sum = 0;
// //
// for (aggregation in _aggs) { // for (state in states) {
// for (a in aggregation) { // for (s in state) {
// sum += a // sum += s
// } // }
// }; // };
// //
// newaggregation.add(sum * multiplier); // newaggregation.add(sum * multiplier);
// return newaggregation" // return newaggregation"
// //
scripts.put("multiplied sum aggs of agg values as a new aggregation", vars -> { scripts.put("multiplied sum all states (lists) values as a new aggregation", vars -> {
Integer multiplier = (Integer) vars.get("multiplier"); Integer multiplier = (Integer) vars.get("multiplier");
List newAggregation = new ArrayList(); List newAggregation = new ArrayList();
Integer sum = 0; Integer sum = 0;
List<?> aggs = (List<?>) vars.get("_aggs"); List<List<?>> states = (List<List<?>>) vars.get("states");
for (Object aggregation : (List) aggs) { for (List<?> list : states) {
if (aggregation != null) { if (list != null) {
for (Object a : (List) aggregation) { for (Object s : list) {
sum += ((Number) a).intValue(); sum += ((Number) s).intValue();
} }
} }
} }
@ -193,53 +226,12 @@ public class ScriptedMetricIT extends ESIntegTestCase {
return newAggregation; return newAggregation;
}); });
scripts.put("state.items = new ArrayList()", vars ->
aggContextScript(vars, state -> ((HashMap) state).put("items", new ArrayList())));
scripts.put("state.items.add(1)", vars ->
aggContextScript(vars, state -> {
HashMap stateMap = (HashMap) state;
List items = (List) stateMap.get("items");
items.add(1);
}));
scripts.put("sum context state values", vars -> {
int sum = 0;
HashMap state = (HashMap) vars.get("state");
List items = (List) state.get("items");
for (Object x : items) {
sum += (Integer)x;
}
return sum;
});
scripts.put("sum context states", vars -> {
Integer sum = 0;
List<?> states = (List<?>) vars.get("states");
for (Object state : states) {
sum += ((Number) state).intValue();
}
return sum;
});
return scripts; return scripts;
} }
static <T> Object aggScript(Map<String, Object> vars, Consumer<T> fn) {
return aggScript(vars, fn, "_agg");
}
static <T> Object aggContextScript(Map<String, Object> vars, Consumer<T> fn) {
return aggScript(vars, fn, "state");
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private static <T> Object aggScript(Map<String, Object> vars, Consumer<T> fn, String stateVarName) { static Map<String, Object> aggScript(Map<String, Object> vars, Consumer<Map<String, Object>> fn) {
T aggState = (T) vars.get(stateVarName); Map<String, Object> aggState = (Map<String, Object>) vars.get("state");
fn.accept(aggState); fn.accept(aggState);
return aggState; return aggState;
} }
@ -285,17 +277,17 @@ public class ScriptedMetricIT extends ESIntegTestCase {
assertAcked(client().admin().cluster().preparePutStoredScript() assertAcked(client().admin().cluster().preparePutStoredScript()
.setId("mapScript_stored") .setId("mapScript_stored")
.setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," +
" \"source\": \"_agg.add(vars.multiplier)\"} }"), XContentType.JSON)); " \"source\": \"state.list.add(vars.multiplier)\"} }"), XContentType.JSON));
assertAcked(client().admin().cluster().preparePutStoredScript() assertAcked(client().admin().cluster().preparePutStoredScript()
.setId("combineScript_stored") .setId("combineScript_stored")
.setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," +
" \"source\": \"sum agg values as a new aggregation\"} }"), XContentType.JSON)); " \"source\": \"sum state values as a new aggregation\"} }"), XContentType.JSON));
assertAcked(client().admin().cluster().preparePutStoredScript() assertAcked(client().admin().cluster().preparePutStoredScript()
.setId("reduceScript_stored") .setId("reduceScript_stored")
.setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," +
" \"source\": \"sum aggs of agg values as a new aggregation\"} }"), XContentType.JSON)); " \"source\": \"sum all states (lists) values as a new aggregation\"} }"), XContentType.JSON));
indexRandom(true, builders); indexRandom(true, builders);
ensureSearchable(); ensureSearchable();
@ -315,9 +307,10 @@ public class ScriptedMetricIT extends ESIntegTestCase {
// the name of the file script is used in test method while the source of the file script // the name of the file script is used in test method while the source of the file script
// must match a predefined script from CustomScriptPlugin.pluginScripts() method // must match a predefined script from CustomScriptPlugin.pluginScripts() method
Files.write(scripts.resolve("init_script.mockscript"), "vars.multiplier = 3".getBytes("UTF-8")); Files.write(scripts.resolve("init_script.mockscript"), "vars.multiplier = 3".getBytes("UTF-8"));
Files.write(scripts.resolve("map_script.mockscript"), "_agg.add(vars.multiplier)".getBytes("UTF-8")); Files.write(scripts.resolve("map_script.mockscript"), "state.list.add(vars.multiplier)".getBytes("UTF-8"));
Files.write(scripts.resolve("combine_script.mockscript"), "sum agg values as a new aggregation".getBytes("UTF-8")); Files.write(scripts.resolve("combine_script.mockscript"), "sum state values as a new aggregation".getBytes("UTF-8"));
Files.write(scripts.resolve("reduce_script.mockscript"), "sum aggs of agg values as a new aggregation".getBytes("UTF-8")); Files.write(scripts.resolve("reduce_script.mockscript"),
"sum all states (lists) values as a new aggregation".getBytes("UTF-8"));
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException("failed to create scripts"); throw new RuntimeException("failed to create scripts");
} }
@ -329,7 +322,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
} }
public void testMap() { public void testMap() {
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg['count'] = 1", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", Collections.emptyMap());
SearchResponse response = client().prepareSearch("idx") SearchResponse response = client().prepareSearch("idx")
.setQuery(matchAllQuery()) .setQuery(matchAllQuery())
@ -365,52 +358,12 @@ public class ScriptedMetricIT extends ESIntegTestCase {
assertThat(numShardsRun, greaterThan(0)); assertThat(numShardsRun, greaterThan(0));
} }
public void testExplicitAggParam() { public void testMapWithParams() {
Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(1)", Collections.emptyMap());
SearchResponse response = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(scriptedMetric("scripted").params(params).mapScript(mapScript))
.get();
assertSearchResponse(response);
assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
Aggregation aggregation = response.getAggregations().get("scripted");
assertThat(aggregation, notNullValue());
assertThat(aggregation, instanceOf(ScriptedMetric.class));
ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
long totalCount = 0;
for (Object object : aggregationList) {
assertThat(object, notNullValue());
assertThat(object, instanceOf(List.class));
List<?> list = (List<?>) object;
for (Object o : list) {
assertThat(o, notNullValue());
assertThat(o, instanceOf(Number.class));
Number numberValue = (Number) o;
assertThat(numberValue, equalTo((Number) 1));
totalCount += numberValue.longValue();
}
}
assertThat(totalCount, equalTo(numDocs));
}
public void testMapWithParamsAndImplicitAggMap() {
// Split the params up between the script and the aggregation. // Split the params up between the script and the aggregation.
// Don't put any _agg map in params.
Map<String, Object> scriptParams = Collections.singletonMap("param1", "12"); Map<String, Object> scriptParams = Collections.singletonMap("param1", "12");
Map<String, Object> aggregationParams = Collections.singletonMap("param2", 1); Map<String, Object> aggregationParams = Collections.singletonMap("param2", 1);
// The _agg hashmap will be available even if not declared in the params map Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state[param1] = param2", scriptParams);
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg[param1] = param2", scriptParams);
SearchResponse response = client().prepareSearch("idx") SearchResponse response = client().prepareSearch("idx")
.setQuery(matchAllQuery()) .setQuery(matchAllQuery())
@ -454,7 +407,6 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
SearchResponse response = client() SearchResponse response = client()
@ -466,7 +418,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
.initScript( .initScript(
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap())) new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()))
.mapScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, .mapScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
"_agg.add(vars.multiplier)", Collections.emptyMap()))) "state.list.add(vars.multiplier)", Collections.emptyMap())))
.get(); .get();
assertSearchResponse(response); assertSearchResponse(response);
assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@ -483,8 +435,11 @@ public class ScriptedMetricIT extends ESIntegTestCase {
long totalCount = 0; long totalCount = 0;
for (Object object : aggregationList) { for (Object object : aggregationList) {
assertThat(object, notNullValue()); assertThat(object, notNullValue());
assertThat(object, instanceOf(List.class)); assertThat(object, instanceOf(HashMap.class));
List<?> list = (List<?>) object; Map<String, Object> map = (Map<String, Object>) object;
assertThat(map, hasKey("list"));
assertThat(map.get("list"), instanceOf(List.class));
List<?> list = (List<?>) map.get("list");
for (Object o : list) { for (Object o : list) {
assertThat(o, notNullValue()); assertThat(o, notNullValue());
assertThat(o, instanceOf(Number.class)); assertThat(o, instanceOf(Number.class));
@ -501,12 +456,11 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(1)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", Collections.emptyMap());
Script combineScript = Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
SearchResponse response = client() SearchResponse response = client()
.prepareSearch("idx") .prepareSearch("idx")
@ -553,13 +507,13 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Collections.emptyMap());
Script combineScript = Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
SearchResponse response = client() SearchResponse response = client()
.prepareSearch("idx") .prepareSearch("idx")
@ -607,15 +561,15 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Collections.emptyMap());
Script combineScript = Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
Script reduceScript = Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); "sum all states (lists) values as a new aggregation", Collections.emptyMap());
SearchResponse response = client() SearchResponse response = client()
.prepareSearch("idx") .prepareSearch("idx")
@ -652,15 +606,15 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Collections.emptyMap());
Script combineScript = Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
Script reduceScript = Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); "sum all states (lists) values as a new aggregation", Collections.emptyMap());
SearchResponse searchResponse = client() SearchResponse searchResponse = client()
.prepareSearch("idx") .prepareSearch("idx")
@ -707,14 +661,14 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Collections.emptyMap());
Script combineScript = Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
Script reduceScript = Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); "sum all states (lists) values as a new aggregation", Collections.emptyMap());
SearchResponse response = client() SearchResponse response = client()
.prepareSearch("idx") .prepareSearch("idx")
@ -749,13 +703,13 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Script reduceScript = Collections.emptyMap());
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
"sum all states' state.list values as a new aggregation", Collections.emptyMap());
SearchResponse response = client() SearchResponse response = client()
.prepareSearch("idx") .prepareSearch("idx")
@ -789,12 +743,12 @@ public class ScriptedMetricIT extends ESIntegTestCase {
Map<String, Object> varsMap = new HashMap<>(); Map<String, Object> varsMap = new HashMap<>();
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Script reduceScript = Collections.emptyMap());
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
"sum all states' state.list values as a new aggregation", Collections.emptyMap());
SearchResponse response = client() SearchResponse response = client()
.prepareSearch("idx") .prepareSearch("idx")
@ -828,18 +782,18 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Map<String, Object> reduceParams = new HashMap<>(); Map<String, Object> reduceParams = new HashMap<>();
reduceParams.put("multiplier", 4); reduceParams.put("multiplier", 4);
Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Collections.emptyMap());
Script combineScript = Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
Script reduceScript = Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "multiplied sum aggs of agg values as a new aggregation", reduceParams); "multiplied sum all states (lists) values as a new aggregation", reduceParams);
SearchResponse response = client() SearchResponse response = client()
.prepareSearch("idx") .prepareSearch("idx")
@ -875,7 +829,6 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
SearchResponse response = client() SearchResponse response = client()
@ -916,15 +869,15 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Collections.emptyMap());
Script combineScript = Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
Script reduceScript = Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); "sum all states (lists) values as a new aggregation", Collections.emptyMap());
SearchResponse response = client() SearchResponse response = client()
.prepareSearch("idx") .prepareSearch("idx")
@ -977,15 +930,15 @@ public class ScriptedMetricIT extends ESIntegTestCase {
varsMap.put("multiplier", 1); varsMap.put("multiplier", 1);
Map<String, Object> params = new HashMap<>(); Map<String, Object> params = new HashMap<>();
params.put("_agg", new ArrayList<>());
params.put("vars", varsMap); params.put("vars", varsMap);
Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Collections.emptyMap());
Script combineScript = Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
Script reduceScript = Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); "sum all states (lists) values as a new aggregation", Collections.emptyMap());
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery()) .setQuery(matchAllQuery())
@ -1021,7 +974,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
* not using a script does get cached. * not using a script does get cached.
*/ */
public void testDontCacheScripts() throws Exception { public void testDontCacheScripts() throws Exception {
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg['count'] = 1", Collections.emptyMap()); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", Collections.emptyMap());
assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=long")
.setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
.get()); .get());
@ -1047,7 +1000,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
public void testConflictingAggAndScriptParams() { public void testConflictingAggAndScriptParams() {
Map<String, Object> params = Collections.singletonMap("param1", "12"); Map<String, Object> params = Collections.singletonMap("param1", "12");
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(1)", params); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", params);
SearchRequestBuilder builder = client().prepareSearch("idx") SearchRequestBuilder builder = client().prepareSearch("idx")
.setQuery(matchAllQuery()) .setQuery(matchAllQuery())
@ -1056,37 +1009,4 @@ public class ScriptedMetricIT extends ESIntegTestCase {
SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get); SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get);
assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both aggregation and script parameters")); assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both aggregation and script parameters"));
} }
public void testAggFromContext() {
Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items = new ArrayList()", Collections.emptyMap());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items.add(1)", Collections.emptyMap());
Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context state values", Collections.emptyMap());
Script reduceScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context states",
Collections.emptyMap());
SearchResponse response = client()
.prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(
scriptedMetric("scripted")
.initScript(initScript)
.mapScript(mapScript)
.combineScript(combineScript)
.reduceScript(reduceScript))
.get();
Aggregation aggregation = response.getAggregations().get("scripted");
assertThat(aggregation, notNullValue());
assertThat(aggregation, instanceOf(ScriptedMetric.class));
ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
assertThat(scriptedMetricAggregation.aggregation(), instanceOf(Integer.class));
Integer aggResult = (Integer) scriptedMetricAggregation.aggregation();
long totalAgg = aggResult.longValue();
assertThat(totalAgg, equalTo(numDocs));
}
} }
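The changes throughout this file track the rename of the scripted-metric script variables: map and combine scripts now work on a framework-provided state object instead of a caller-supplied _agg parameter, and reduce scripts receive the per-shard results as states instead of _aggs. A sketch of what user scripts look like under the new names, modelled on the documented profit example; the "painless" language, the "sales" index, and the amount field are assumptions for illustration only:

import java.util.Collections;

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.scriptedMetric;

public class ScriptedMetricStateExample {

    static SearchRequestBuilder profitRequest(Client client) {
        Script initScript = new Script(ScriptType.INLINE, "painless",
            "state.transactions = []", Collections.emptyMap());
        Script mapScript = new Script(ScriptType.INLINE, "painless",
            "state.transactions.add(doc['amount'].value)", Collections.emptyMap());
        Script combineScript = new Script(ScriptType.INLINE, "painless",
            "double profit = 0; for (t in state.transactions) { profit += t } return profit", Collections.emptyMap());
        Script reduceScript = new Script(ScriptType.INLINE, "painless",
            "double profit = 0; for (p in states) { profit += p } return profit", Collections.emptyMap());

        return client.prepareSearch("sales")
            .setQuery(matchAllQuery())
            .addAggregation(scriptedMetric("profit")
                .initScript(initScript)
                .mapScript(mapScript)
                .combineScript(combineScript)
                .reduceScript(reduceScript));
    }
}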
View File
@ -0,0 +1,109 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.scripted;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.sameInstance;
/**
* This test verifies that the _aggs param is added correctly when the system property
* "es.aggregations.enable_scripted_metric_agg_param" is set to true.
*/
public class InternalScriptedMetricAggStateV6CompatTests extends InternalAggregationTestCase<InternalScriptedMetric> {
private static final String REDUCE_SCRIPT_NAME = "reduceScript";
@Override
protected InternalScriptedMetric createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
Script reduceScript = new Script(ScriptType.INLINE, MockScriptEngine.NAME, REDUCE_SCRIPT_NAME, Collections.emptyMap());
return new InternalScriptedMetric(name, "agg value", reduceScript, pipelineAggregators, metaData);
}
/**
* Mock of the script service. The script that is run looks at the
* "_aggs" parameter to verify that it was put in place by InternalScriptedMetric.
*/
@Override
protected ScriptService mockScriptService() {
Function<Map<String, Object>, Object> script = params -> {
Object aggs = params.get("_aggs");
Object states = params.get("states");
assertThat(aggs, instanceOf(List.class));
assertThat(aggs, sameInstance(states));
return aggs;
};
@SuppressWarnings("unchecked")
MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME,
Collections.singletonMap(REDUCE_SCRIPT_NAME, script));
Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
}
@Override
protected void assertReduced(InternalScriptedMetric reduced, List<InternalScriptedMetric> inputs) {
assertWarnings(ScriptedMetricAggContexts.AGG_PARAM_DEPRECATION_WARNING);
}
@Override
protected Reader<InternalScriptedMetric> instanceReader() {
return InternalScriptedMetric::new;
}
@Override
protected void assertFromXContent(InternalScriptedMetric aggregation, ParsedAggregation parsedAggregation) {}
@Override
protected Predicate<String> excludePathsFromXContentInsertion() {
return path -> path.contains(CommonFields.VALUE.getPreferredName());
}
@Override
protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance) {
String name = instance.getName();
Object value = instance.aggregation();
Script reduceScript = instance.reduceScript;
List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
Map<String, Object> metaData = instance.getMetaData();
return new InternalScriptedMetric(name + randomAlphaOfLength(5), value, reduceScript, pipelineAggregators,
metaData);
}
}

View File

@ -107,7 +107,7 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase<Int
/** /**
* Mock of the script service. The script that is run looks at the * Mock of the script service. The script that is run looks at the
* "_aggs" parameter visible when executing the script and simply returns the count. * "states" context variable visible when executing the script and simply returns the count.
* This should be equal to the number of input InternalScriptedMetrics that are reduced * This should be equal to the number of input InternalScriptedMetrics that are reduced
* in total. * in total.
*/ */
@ -116,7 +116,7 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase<Int
// mock script always returns the size of the input aggs list as result // mock script always returns the size of the input aggs list as result
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME,
Collections.singletonMap(REDUCE_SCRIPT_NAME, script -> ((List<Object>) script.get("_aggs")).size())); Collections.singletonMap(REDUCE_SCRIPT_NAME, script -> ((List<Object>) script.get("states")).size()));
Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
} }

View File

@ -0,0 +1,180 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.scripted;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import static java.util.Collections.singleton;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.sameInstance;
/**
* This test verifies that the _agg param is added correctly when the system property
* "es.aggregations.enable_scripted_metric_agg_param" is set to true.
*/
public class ScriptedMetricAggregatorAggStateV6CompatTests extends AggregatorTestCase {
private static final String AGG_NAME = "scriptedMetric";
private static final Script INIT_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScript", Collections.emptyMap());
private static final Script MAP_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScript", Collections.emptyMap());
private static final Script COMBINE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScript",
Collections.emptyMap());
private static final Script INIT_SCRIPT_EXPLICIT_AGG = new Script(ScriptType.INLINE, MockScriptEngine.NAME,
"initScriptExplicitAgg", Collections.emptyMap());
private static final Script MAP_SCRIPT_EXPLICIT_AGG = new Script(ScriptType.INLINE, MockScriptEngine.NAME,
"mapScriptExplicitAgg", Collections.emptyMap());
private static final Script COMBINE_SCRIPT_EXPLICIT_AGG = new Script(ScriptType.INLINE, MockScriptEngine.NAME,
"combineScriptExplicitAgg", Collections.emptyMap());
private static final String EXPLICIT_AGG_OBJECT = "Explicit agg object";
private static final Map<String, Function<Map<String, Object>, Object>> SCRIPTS = new HashMap<>();
@BeforeClass
@SuppressWarnings("unchecked")
public static void initMockScripts() {
// If _agg is provided implicitly, it should be the same object as "state" from the context.
SCRIPTS.put("initScript", params -> {
Object agg = params.get("_agg");
Object state = params.get("state");
assertThat(agg, instanceOf(Map.class));
assertThat(agg, sameInstance(state));
return agg;
});
SCRIPTS.put("mapScript", params -> {
Object agg = params.get("_agg");
Object state = params.get("state");
assertThat(agg, instanceOf(Map.class));
assertThat(agg, sameInstance(state));
return agg;
});
SCRIPTS.put("combineScript", params -> {
Object agg = params.get("_agg");
Object state = params.get("state");
assertThat(agg, instanceOf(Map.class));
assertThat(agg, sameInstance(state));
return agg;
});
SCRIPTS.put("initScriptExplicitAgg", params -> {
Object agg = params.get("_agg");
assertThat(agg, equalTo(EXPLICIT_AGG_OBJECT));
return agg;
});
SCRIPTS.put("mapScriptExplicitAgg", params -> {
Object agg = params.get("_agg");
assertThat(agg, equalTo(EXPLICIT_AGG_OBJECT));
return agg;
});
SCRIPTS.put("combineScriptExplicitAgg", params -> {
Object agg = params.get("_agg");
assertThat(agg, equalTo(EXPLICIT_AGG_OBJECT));
return agg;
});
}
/**
* Test that the _agg param is implicitly added
*/
public void testWithImplicitAggParam() throws IOException {
try (Directory directory = newDirectory()) {
Integer numDocs = 10;
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
for (int i = 0; i < numDocs; i++) {
indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
}
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
}
}
assertWarnings(ScriptedMetricAggContexts.AGG_PARAM_DEPRECATION_WARNING);
}
/**
* Test that an explicitly added _agg param is honored
*/
public void testWithExplicitAggParam() throws IOException {
try (Directory directory = newDirectory()) {
Integer numDocs = 10;
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
for (int i = 0; i < numDocs; i++) {
indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
}
}
Map<String, Object> aggParams = new HashMap<>();
aggParams.put("_agg", EXPLICIT_AGG_OBJECT);
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
aggregationBuilder
.params(aggParams)
.initScript(INIT_SCRIPT_EXPLICIT_AGG)
.mapScript(MAP_SCRIPT_EXPLICIT_AGG)
.combineScript(COMBINE_SCRIPT_EXPLICIT_AGG);
search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
}
}
assertWarnings(ScriptedMetricAggContexts.AGG_PARAM_DEPRECATION_WARNING);
}
/**
* We cannot use Mockito for mocking QueryShardContext in this case because
* script-related methods (e.g. QueryShardContext#getLazyExecutableScript)
* are final and cannot be mocked
*/
@Override
protected QueryShardContext queryShardContextMock(MapperService mapperService) {
MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, SCRIPTS);
Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
return new QueryShardContext(0, mapperService.getIndexSettings(), null, null, mapperService, null, scriptService,
xContentRegistry(), writableRegistry(), null, null, System::currentTimeMillis, null);
}
}

View File

@ -83,72 +83,72 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public static void initMockScripts() { public static void initMockScripts() {
SCRIPTS.put("initScript", params -> { SCRIPTS.put("initScript", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
agg.put("collector", new ArrayList<Integer>()); state.put("collector", new ArrayList<Integer>());
return agg; return state;
}); });
SCRIPTS.put("mapScript", params -> { SCRIPTS.put("mapScript", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
((List<Integer>) agg.get("collector")).add(1); // just add 1 for each doc the script is run on ((List<Integer>) state.get("collector")).add(1); // just add 1 for each doc the script is run on
return agg; return state;
}); });
SCRIPTS.put("combineScript", params -> { SCRIPTS.put("combineScript", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
return ((List<Integer>) agg.get("collector")).stream().mapToInt(Integer::intValue).sum(); return ((List<Integer>) state.get("collector")).stream().mapToInt(Integer::intValue).sum();
}); });
SCRIPTS.put("initScriptScore", params -> { SCRIPTS.put("initScriptScore", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
agg.put("collector", new ArrayList<Double>()); state.put("collector", new ArrayList<Double>());
return agg; return state;
}); });
SCRIPTS.put("mapScriptScore", params -> { SCRIPTS.put("mapScriptScore", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
((List<Double>) agg.get("collector")).add(((Number) params.get("_score")).doubleValue()); ((List<Double>) state.get("collector")).add(((Number) params.get("_score")).doubleValue());
return agg; return state;
}); });
SCRIPTS.put("combineScriptScore", params -> { SCRIPTS.put("combineScriptScore", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
return ((List<Double>) agg.get("collector")).stream().mapToDouble(Double::doubleValue).sum(); return ((List<Double>) state.get("collector")).stream().mapToDouble(Double::doubleValue).sum();
}); });
SCRIPTS.put("initScriptParams", params -> { SCRIPTS.put("initScriptParams", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
Integer initialValue = (Integer)params.get("initialValue"); Integer initialValue = (Integer)params.get("initialValue");
ArrayList<Integer> collector = new ArrayList<>(); ArrayList<Integer> collector = new ArrayList<>();
collector.add(initialValue); collector.add(initialValue);
agg.put("collector", collector); state.put("collector", collector);
return agg; return state;
}); });
SCRIPTS.put("mapScriptParams", params -> { SCRIPTS.put("mapScriptParams", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
Integer itemValue = (Integer) params.get("itemValue"); Integer itemValue = (Integer) params.get("itemValue");
((List<Integer>) agg.get("collector")).add(itemValue); ((List<Integer>) state.get("collector")).add(itemValue);
return agg; return state;
}); });
SCRIPTS.put("combineScriptParams", params -> { SCRIPTS.put("combineScriptParams", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
int divisor = ((Integer) params.get("divisor")); int divisor = ((Integer) params.get("divisor"));
return ((List<Integer>) agg.get("collector")).stream().mapToInt(Integer::intValue).map(i -> i / divisor).sum(); return ((List<Integer>) state.get("collector")).stream().mapToInt(Integer::intValue).map(i -> i / divisor).sum();
}); });
SCRIPTS.put("initScriptSelfRef", params -> { SCRIPTS.put("initScriptSelfRef", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
agg.put("collector", new ArrayList<Integer>()); state.put("collector", new ArrayList<Integer>());
agg.put("selfRef", agg); state.put("selfRef", state);
return agg; return state;
}); });
SCRIPTS.put("mapScriptSelfRef", params -> { SCRIPTS.put("mapScriptSelfRef", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
agg.put("selfRef", agg); state.put("selfRef", state);
return agg; return state;
}); });
SCRIPTS.put("combineScriptSelfRef", params -> { SCRIPTS.put("combineScriptSelfRef", params -> {
Map<String, Object> agg = (Map<String, Object>) params.get("_agg"); Map<String, Object> state = (Map<String, Object>) params.get("state");
agg.put("selfRef", agg); state.put("selfRef", state);
return agg; return state;
}); });
} }
@ -170,7 +170,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
} }
/** /**
* without combine script, the "_aggs" map should contain a list whose size equals the number of documents matched * without combine script, the "states" map should contain a list whose size equals the number of documents matched
*/ */
public void testScriptedMetricWithoutCombine() throws IOException { public void testScriptedMetricWithoutCombine() throws IOException {
try (Directory directory = newDirectory()) { try (Directory directory = newDirectory()) {

View File

@ -493,15 +493,24 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
} }
public void testGeoField() throws Exception { public void testGeoField() throws Exception {
// Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
// Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder mapping = jsonBuilder(); XContentBuilder mapping = jsonBuilder();
mapping.startObject(); mapping.startObject();
mapping.startObject(TYPE); mapping.startObject(TYPE);
mapping.startObject("properties"); mapping.startObject("properties");
mapping.startObject("location");
mapping.startObject("properties");
mapping.startObject("pin"); mapping.startObject("pin");
mapping.field("type", "geo_point"); mapping.field("type", "geo_point");
// Enable store and disable indexing sometimes
if (randomBoolean()) {
mapping.field("store", "true");
}
if (randomBoolean()) {
mapping.field("index", "false");
}
mapping.endObject(); // pin
mapping.endObject(); mapping.endObject();
mapping.endObject(); // location
mapping.startObject(FIELD); mapping.startObject(FIELD);
mapping.field("type", "completion"); mapping.field("type", "completion");
mapping.field("analyzer", "simple"); mapping.field("analyzer", "simple");
@ -510,7 +519,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
mapping.startObject(); mapping.startObject();
mapping.field("name", "st"); mapping.field("name", "st");
mapping.field("type", "geo"); mapping.field("type", "geo");
mapping.field("path", "pin"); mapping.field("path", "location.pin");
mapping.field("precision", 5); mapping.field("precision", 5);
mapping.endObject(); mapping.endObject();
mapping.endArray(); mapping.endArray();
@ -524,7 +533,9 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
XContentBuilder source1 = jsonBuilder() XContentBuilder source1 = jsonBuilder()
.startObject() .startObject()
.startObject("location")
.latlon("pin", 52.529172, 13.407333) .latlon("pin", 52.529172, 13.407333)
.endObject()
.startObject(FIELD) .startObject(FIELD)
.array("input", "Hotel Amsterdam in Berlin") .array("input", "Hotel Amsterdam in Berlin")
.endObject() .endObject()
@ -533,7 +544,9 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
XContentBuilder source2 = jsonBuilder() XContentBuilder source2 = jsonBuilder()
.startObject() .startObject()
.startObject("location")
.latlon("pin", 52.363389, 4.888695) .latlon("pin", 52.363389, 4.888695)
.endObject()
.startObject(FIELD) .startObject(FIELD)
.array("input", "Hotel Berlin in Amsterdam") .array("input", "Hotel Berlin in Amsterdam")
.endObject() .endObject()
@ -600,6 +613,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
private void createIndexAndMapping(CompletionMappingBuilder completionMappingBuilder) throws IOException { private void createIndexAndMapping(CompletionMappingBuilder completionMappingBuilder) throws IOException {
createIndexAndMappingAndSettings(Settings.EMPTY, completionMappingBuilder); createIndexAndMappingAndSettings(Settings.EMPTY, completionMappingBuilder);
} }
private void createIndexAndMappingAndSettings(Settings settings, CompletionMappingBuilder completionMappingBuilder) throws IOException { private void createIndexAndMappingAndSettings(Settings settings, CompletionMappingBuilder completionMappingBuilder) throws IOException {
XContentBuilder mapping = jsonBuilder().startObject() XContentBuilder mapping = jsonBuilder().startObject()
.startObject(TYPE).startObject("properties") .startObject(TYPE).startObject("properties")

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search.suggest.completion; package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -200,6 +201,70 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
assertContextSuggestFields(fields, 3); assertContextSuggestFields(fields, 3);
} }
public void testMalformedGeoField() throws Exception {
XContentBuilder mapping = jsonBuilder();
mapping.startObject();
mapping.startObject("type1");
mapping.startObject("properties");
mapping.startObject("pin");
String type = randomFrom("text", "keyword", "long");
mapping.field("type", type);
mapping.endObject();
mapping.startObject("suggestion");
mapping.field("type", "completion");
mapping.field("analyzer", "simple");
mapping.startArray("contexts");
mapping.startObject();
mapping.field("name", "st");
mapping.field("type", "geo");
mapping.field("path", "pin");
mapping.field("precision", 5);
mapping.endObject();
mapping.endArray();
mapping.endObject();
mapping.endObject();
mapping.endObject();
mapping.endObject();
ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
() -> createIndex("test", Settings.EMPTY, "type1", mapping));
assertThat(ex.getMessage(), equalTo("field [pin] referenced in context [st] must be mapped to geo_point, found [" + type + "]"));
}
public void testMissingGeoField() throws Exception {
XContentBuilder mapping = jsonBuilder();
mapping.startObject();
mapping.startObject("type1");
mapping.startObject("properties");
mapping.startObject("suggestion");
mapping.field("type", "completion");
mapping.field("analyzer", "simple");
mapping.startArray("contexts");
mapping.startObject();
mapping.field("name", "st");
mapping.field("type", "geo");
mapping.field("path", "pin");
mapping.field("precision", 5);
mapping.endObject();
mapping.endArray();
mapping.endObject();
mapping.endObject();
mapping.endObject();
mapping.endObject();
ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
() -> createIndex("test", Settings.EMPTY, "type1", mapping));
assertThat(ex.getMessage(), equalTo("field [pin] referenced in context [st] is not defined in the mapping"));
}
public void testParsingQueryContextBasic() throws Exception { public void testParsingQueryContextBasic() throws Exception {
XContentBuilder builder = jsonBuilder().value("ezs42e44yx96"); XContentBuilder builder = jsonBuilder().value("ezs42e44yx96");
XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));

View File

@ -722,3 +722,17 @@ setups['sensor_prefab_data'] = '''
{"node.terms.value":"c","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.0,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516294800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} {"node.terms.value":"c","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.0,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516294800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"}
''' '''
setups['admin_role'] = '''
- do:
xpack.security.put_role:
name: "my_admin_role"
body: >
{
"cluster": ["all"],
"indices": [
{"names": ["index1", "index2" ], "privileges": ["all"], "field_security" : {"grant" : [ "title", "body" ]}}
],
"run_as": [ "other_user" ],
"metadata" : {"version": 1}
}
'''

View File

@ -16,6 +16,7 @@
=== Data === Data
* <<rollup-get-rollup-caps,Get Rollup Capabilities>> * <<rollup-get-rollup-caps,Get Rollup Capabilities>>
* <<rollup-get-rollup-index-caps,Get Rollup Index Capabilities>>
[float] [float]
[[rollup-search-endpoint]] [[rollup-search-endpoint]]
@ -31,5 +32,6 @@ include::rollup/put-job.asciidoc[]
include::rollup/start-job.asciidoc[] include::rollup/start-job.asciidoc[]
include::rollup/stop-job.asciidoc[] include::rollup/stop-job.asciidoc[]
include::rollup/rollup-caps.asciidoc[] include::rollup/rollup-caps.asciidoc[]
include::rollup/rollup-index-caps.asciidoc[]
include::rollup/rollup-search.asciidoc[] include::rollup/rollup-search.asciidoc[]
include::rollup/rollup-job-config.asciidoc[] include::rollup/rollup-job-config.asciidoc[]

View File

@ -27,8 +27,8 @@ live?
==== Path Parameters ==== Path Parameters
`index`:: `index`::
(string) Index, indices or index-pattern to return rollup capabilities for. If omitted (or `_all` is used) all available (string) Index, indices or index-pattern to return rollup capabilities for. `_all` may be used to fetch
rollup job capabilities will be returned rollup capabilities from all jobs
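For example (a hypothetical request, not part of this change, assuming one or more rollup jobs target indices matching `sensor-*`), the capabilities for that pattern could be fetched with:
[source,js]
--------------------------------------------------
GET _xpack/rollup/data/sensor-*/_rollup_caps
--------------------------------------------------
// NOTCONSOLE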
==== Request Body ==== Request Body

View File

@ -26,15 +26,13 @@ This API will allow you to determine:
`index`:: `index`::
(string) Index or index-pattern of concrete rollup indices to check for capabilities. (string) Index or index-pattern of concrete rollup indices to check for capabilities.
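As a sketch (the index name `sensor_rollup` is hypothetical), the capabilities stored inside a concrete rollup index could be checked with:
[source,js]
--------------------------------------------------
GET /sensor_rollup/_rollup/data
--------------------------------------------------
// NOTCONSOLE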
==== Request Body ==== Request Body
There is no request body for the Get Jobs API. There is no request body for the Get Jobs API.
==== Authorization ==== Authorization
You must have `monitor`, `monitor_rollup`, `manage` or `manage_rollup` cluster privileges to use this API. You must have the `read` index privilege on the index that stores the rollup results.
For more information, see For more information, see
{xpack-ref}/security-privileges.html[Security Privileges]. {xpack-ref}/security-privileges.html[Security Privileges].

View File

@ -82,6 +82,12 @@ In the above example, there are several pieces of logistical configuration for t
will tend to execute faster, but will require more memory during processing. This has no effect on how the data is rolled up, it is will tend to execute faster, but will require more memory during processing. This has no effect on how the data is rolled up, it is
merely used for tweaking the speed/memory cost of the indexer. merely used for tweaking the speed/memory cost of the indexer.
[NOTE]
The `index_pattern` cannot be a pattern that would also match the destination `rollup_index`. E.g. the pattern
`"foo-*"` would match the rollup index `"foo-rollup"`. This causes problems because the rollup job would attempt
to roll up its own data at runtime. If you attempt to configure a pattern that matches the `rollup_index`, an exception
will be thrown to prevent this behavior.
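As a concrete illustration (a hypothetical job body, not part of this change), the pattern `"sensor-*"` below does not match the destination `sensor_rollup`, so the job is accepted; a pattern such as `"sensor*"` would match it and be rejected:
[source,js]
--------------------------------------------------
PUT _xpack/rollup/job/sensor
{
    "index_pattern": "sensor-*",
    "rollup_index": "sensor_rollup",
    "cron": "*/30 * * * * ?",
    "page_size": 1000,
    "groups": {
        "date_histogram": {
            "field": "timestamp",
            "interval": "1h"
        }
    },
    "metrics": [
        {
            "field": "temperature",
            "metrics": ["min", "max", "sum"]
        }
    ]
}
--------------------------------------------------
// NOTCONSOLE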
[[rollup-groups-config]] [[rollup-groups-config]]
==== Grouping Config ==== Grouping Config

View File

@ -34,7 +34,7 @@ or using `_all`, is not permitted
The request body supports a subset of features from the regular Search API. It supports: The request body supports a subset of features from the regular Search API. It supports:
- `query` param for specifying a DSL query, subject to some limitations - `query` param for specifying a DSL query, subject to some limitations (see <<rollup-search-limitations>> and <<rollup-agg-limitations>>)
- `aggregations` param for specifying aggregations - `aggregations` param for specifying aggregations
Functionality that is not available: Functionality that is not available:

View File

@ -2,20 +2,26 @@
[[security-api]] [[security-api]]
== Security APIs == Security APIs
You can use the following APIs to perform {security} activities.
* <<security-api-authenticate>> * <<security-api-authenticate>>
* <<security-api-clear-cache>> * <<security-api-clear-cache>>
* <<security-api-privileges>> * <<security-api-privileges>>
* <<security-api-roles>>
* <<security-api-role-mapping>> * <<security-api-role-mapping>>
* <<security-api-ssl>> * <<security-api-ssl>>
* <<security-api-tokens>> * <<security-api-tokens>>
* <<security-api-users>> * <<security-api-users>>
include::security/roles.asciidoc[]
include::security/authenticate.asciidoc[] include::security/authenticate.asciidoc[]
include::security/change-password.asciidoc[] include::security/change-password.asciidoc[]
include::security/clear-cache.asciidoc[] include::security/clear-cache.asciidoc[]
include::security/clear-roles-cache.asciidoc[]
include::security/create-roles.asciidoc[]
include::security/delete-roles.asciidoc[]
include::security/get-roles.asciidoc[]
include::security/privileges.asciidoc[] include::security/privileges.asciidoc[]
include::security/roles.asciidoc[]
include::security/role-mapping.asciidoc[] include::security/role-mapping.asciidoc[]
include::security/ssl.asciidoc[] include::security/ssl.asciidoc[]
include::security/tokens.asciidoc[] include::security/tokens.asciidoc[]

View File

@ -0,0 +1,39 @@
[role="xpack"]
[[security-api-clear-role-cache]]
=== Clear roles cache API
Evicts roles from the native role cache.
==== Request
`POST /_xpack/security/role/<name>/_clear_cache`
==== Description
For more information about the native realm, see
{stack-ov}/realms.html[Realms] and <<configuring-native-realm>>.
==== Path Parameters
`name`::
(string) The name of the role.
//==== Request Body
==== Authorization
To use this API, you must have at least the `manage_security` cluster
privilege.
==== Examples
The clear roles cache API evicts roles from the native role cache. For example,
to clear the cache for `my_admin_role`:
[source,js]
--------------------------------------------------
POST /_xpack/security/role/my_admin_role/_clear_cache
--------------------------------------------------
// CONSOLE

View File

@ -0,0 +1,102 @@
[role="xpack"]
[[security-api-put-role]]
=== Create roles API
Adds roles in the native realm.
==== Request
`POST /_xpack/security/role/<name>` +
`PUT /_xpack/security/role/<name>`
==== Description
The role API is generally the preferred way to manage roles, rather than using
file-based role management. For more information about the native realm, see
{stack-ov}/realms.html[Realms] and <<configuring-native-realm>>.
==== Path Parameters
`name`::
(string) The name of the role.
==== Request Body
The following parameters can be specified in the body of a PUT or POST request
and pertain to adding a role:
`cluster`:: (list) A list of cluster privileges. These privileges define the
cluster level actions that users with this role are able to execute.
`indices`:: (list) A list of indices permissions entries.
`field_security`::: (list) The document fields that the owners of the role have
read access to. For more information, see
{stack-ov}/field-and-document-access-control.html[Setting up field and document level security].
`names` (required)::: (list) A list of indices (or index name patterns) to which the
permissions in this entry apply.
`privileges`(required)::: (list) The index level privileges that the owners of the role
have on the specified indices.
`query`::: A search query that defines the documents the owners of the role have
read access to. A document within the specified indices must match this query in
order for it to be accessible by the owners of the role.
`metadata`:: (object) Optional meta-data. Within the `metadata` object, keys
that begin with `_` are reserved for system usage.
`run_as`:: (list) A list of users that the owners of this role can impersonate.
For more information, see
{stack-ov}/run-as-privilege.html[Submitting requests on behalf of other users].
For more information, see {stack-ov}/defining-roles.html[Defining roles].
==== Authorization
To use this API, you must have at least the `manage_security` cluster
privilege.
==== Examples
The following example adds a role called `my_admin_role`:
[source,js]
--------------------------------------------------
POST /_xpack/security/role/my_admin_role
{
"cluster": ["all"],
"indices": [
{
"names": [ "index1", "index2" ],
"privileges": ["all"],
"field_security" : { // optional
"grant" : [ "title", "body" ]
},
"query": "{\"match\": {\"title\": \"foo\"}}" // optional
}
],
"run_as": [ "other_user" ], // optional
"metadata" : { // optional
"version" : 1
}
}
--------------------------------------------------
// CONSOLE
A successful call returns a JSON structure that shows whether the role has been
created or updated.
[source,js]
--------------------------------------------------
{
"role": {
"created": true <1>
}
}
--------------------------------------------------
// TESTRESPONSE
<1> When an existing role is updated, `created` is set to false.

View File

@ -0,0 +1,53 @@
[role="xpack"]
[[security-api-delete-role]]
=== Delete roles API
Removes roles in the native realm.
==== Request
`DELETE /_xpack/security/role/<name>`
==== Description
The Roles API is generally the preferred way to manage roles, rather than using
file-based role management. For more information about the native realm, see
{stack-ov}/realms.html[Realms] and <<configuring-native-realm>>.
==== Path Parameters
`name`::
(string) The name of the role.
//==== Request Body
==== Authorization
To use this API, you must have at least the `manage_security` cluster
privilege.
==== Examples
The following example deletes a `my_admin_role` role:
[source,js]
--------------------------------------------------
DELETE /_xpack/security/role/my_admin_role
--------------------------------------------------
// CONSOLE
// TEST[setup:admin_role]
If the role is successfully deleted, the request returns `{"found": true}`.
Otherwise, `found` is set to false.
[source,js]
--------------------------------------------------
{
"found" : true
}
--------------------------------------------------
// TESTRESPONSE

View File

@ -0,0 +1,85 @@
[role="xpack"]
[[security-api-get-role]]
=== Get roles API
Retrieves roles in the native realm.
==== Request
`GET /_xpack/security/role` +
`GET /_xpack/security/role/<name>`
==== Description
For more information about the native realm, see
{stack-ov}/realms.html[Realms] and <<configuring-native-realm>>.
==== Path Parameters
`name`::
(string) The name of the role. You can specify multiple roles as a
comma-separated list. If you do not specify this parameter, the API
returns information about all roles.
//==== Request Body
==== Authorization
To use this API, you must have at least the `manage_security` cluster
privilege.
==== Examples
The following example retrieves information about the `my_admin_role` role in
the native realm:
[source,js]
--------------------------------------------------
GET /_xpack/security/role/my_admin_role
--------------------------------------------------
// CONSOLE
// TEST[setup:admin_role]
A successful call returns an array of roles with the JSON representation of the
role. If the role is not defined in the native realm, the request returns 404.
[source,js]
--------------------------------------------------
{
"my_admin_role": {
"cluster" : [ "all" ],
"indices" : [
{
"names" : [ "index1", "index2" ],
"privileges" : [ "all" ],
"field_security" : {
"grant" : [ "title", "body" ]}
}
],
"applications" : [ ],
"run_as" : [ "other_user" ],
"metadata" : {
"version" : 1
},
"transient_metadata": {
"enabled": true
}
}
}
--------------------------------------------------
// TESTRESPONSE
To retrieve all roles, omit the role name:
[source,js]
--------------------------------------------------
GET /_xpack/security/role
--------------------------------------------------
// CONSOLE
// TEST[continued]
NOTE: If a single role is requested, that role is returned as the response. When
requesting multiple roles, an object is returned holding the found roles, each
keyed by the relevant role name.

View File

@ -1,205 +1,9 @@
[role="xpack"] [float]
[[security-api-roles]] [[security-api-roles]]
=== Role Management APIs === Roles
The Roles API enables you to add, remove, and retrieve roles in the `native` You can use the following APIs to add, remove, and retrieve roles in the native realm:
realm.
==== Request * <<security-api-put-role,Create role>>, <<security-api-delete-role,Delete role>>
* <<security-api-clear-role-cache,Clear roles cache>>
`GET /_xpack/security/role` + * <<security-api-get-role,Get roles>>
`GET /_xpack/security/role/<name>` +
`DELETE /_xpack/security/role/<name>` +
`POST /_xpack/security/role/<name>/_clear_cache` +
`POST /_xpack/security/role/<name>` +
`PUT /_xpack/security/role/<name>`
==== Description
The Roles API is generally the preferred way to manage roles, rather than using
file-based role management. For more information, see
{xpack-ref}/authorization.html[Configuring Role-based Access Control].
==== Path Parameters
`name`::
(string) The name of the role. If you do not specify this parameter, the
Get Roles API returns information about all roles.
==== Request Body
The following parameters can be specified in the body of a PUT or POST request
and pertain to adding a role:
`cluster`:: (list) A list of cluster privileges. These privileges define the
cluster level actions that users with this role are able to execute.
`indices`:: (list) A list of indices permissions entries.
`field_security`::: (list) The document fields that the owners of the role have
read access to. For more information, see
{xpack-ref}/field-and-document-access-control.html[Setting Up Field and Document Level Security].
`names` (required)::: (list) A list of indices (or index name patterns) to which the
permissions in this entry apply.
`privileges`(required)::: (list) The index level privileges that the owners of the role
have on the specified indices.
`query`::: A search query that defines the documents the owners of the role have
read access to. A document within the specified indices must match this query in
order for it to be accessible by the owners of the role.
`metadata`:: (object) Optional meta-data. Within the `metadata` object, keys
that begin with `_` are reserved for system usage.
`run_as`:: (list) A list of users that the owners of this role can impersonate.
For more information, see
{xpack-ref}/run-as-privilege.html[Submitting Requests on Behalf of Other Users].
For more information, see {xpack-ref}/defining-roles.html[Defining Roles].
==== Authorization
To use this API, you must have at least the `manage_security` cluster
privilege.
==== Examples
[[security-api-put-role]]
To add a role, submit a PUT or POST request to the `/_xpack/security/role/<rolename>`
endpoint:
[source,js]
--------------------------------------------------
POST /_xpack/security/role/my_admin_role
{
"cluster": ["all"],
"indices": [
{
"names": [ "index1", "index2" ],
"privileges": ["all"],
"field_security" : { // optional
"grant" : [ "title", "body" ]
},
"query": "{\"match\": {\"title\": \"foo\"}}" // optional
}
],
"run_as": [ "other_user" ], // optional
"metadata" : { // optional
"version" : 1
}
}
--------------------------------------------------
// CONSOLE
A successful call returns a JSON structure that shows whether the role has been
created or updated.
[source,js]
--------------------------------------------------
{
"role": {
"created": true <1>
}
}
--------------------------------------------------
// TESTRESPONSE
<1> When an existing role is updated, `created` is set to false.
[[security-api-get-role]]
To retrieve a role from the `native` Security realm, issue a GET request to the
`/_xpack/security/role/<rolename>` endpoint:
[source,js]
--------------------------------------------------
GET /_xpack/security/role/my_admin_role
--------------------------------------------------
// CONSOLE
// TEST[continued]
A successful call returns an array of roles with the JSON representation of the
role. If the role is not defined in the `native` realm, the request 404s.
[source,js]
--------------------------------------------------
{
"my_admin_role": {
"cluster" : [ "all" ],
"indices" : [ {
"names" : [ "index1", "index2" ],
"privileges" : [ "all" ],
"field_security" : {
"grant" : [ "title", "body" ]
},
"query" : "{\"match\": {\"title\": \"foo\"}}"
} ],
"applications" : [ ],
"run_as" : [ "other_user" ],
"metadata" : {
"version" : 1
},
"transient_metadata": {
"enabled": true
}
}
}
--------------------------------------------------
// TESTRESPONSE
You can specify multiple roles as a comma-separated list. To retrieve all roles,
omit the role name.
[source,js]
--------------------------------------------------
# Retrieve roles "r1", "r2", and "my_admin_role"
GET /_xpack/security/role/r1,r2,my_admin_role
# Retrieve all roles
GET /_xpack/security/role
--------------------------------------------------
// CONSOLE
// TEST[continued]
NOTE: If single role is requested, that role is returned as the response. When
requesting multiple roles, an object is returned holding the found roles, each
keyed by the relevant role name.
[[security-api-delete-role]]
To delete a role, submit a DELETE request to the `/_xpack/security/role/<rolename>`
endpoint:
[source,js]
--------------------------------------------------
DELETE /_xpack/security/role/my_admin_role
--------------------------------------------------
// CONSOLE
// TEST[continued]
If the role is successfully deleted, the request returns `{"found": true}`.
Otherwise, `found` is set to false.
[source,js]
--------------------------------------------------
{
"found" : true
}
--------------------------------------------------
// TESTRESPONSE
[[security-api-clear-role-cache]]
The Clear Roles Cache API evicts roles from the native role cache. To clear the
cache for a role, submit a POST request `/_xpack/security/role/<rolename>/_clear_cache`
endpoint:
[source,js]
--------------------------------------------------
POST /_xpack/security/role/my_admin_role/_clear_cache
--------------------------------------------------
// CONSOLE

View File

@ -15,18 +15,19 @@ Most {rollup} endpoints have the following base:
[[rollup-api-jobs]] [[rollup-api-jobs]]
=== /job/ === /job/
* {ref}/rollup-put-job.html[PUT /job/<job_id+++>+++]: Create a job * {ref}/rollup-put-job.html[PUT /_xpack/rollup/job/<job_id+++>+++]: Create a job
* {ref}/rollup-get-job.html[GET /job]: List jobs * {ref}/rollup-get-job.html[GET /_xpack/rollup/job]: List jobs
* {ref}/rollup-get-job.html[GET /job/<job_id+++>+++]: Get job details * {ref}/rollup-get-job.html[GET /_xpack/rollup/job/<job_id+++>+++]: Get job details
* {ref}/rollup-start-job.html[POST /job/<job_id>/_start]: Start a job * {ref}/rollup-start-job.html[POST /_xpack/rollup/job/<job_id>/_start]: Start a job
* {ref}/rollup-stop-job.html[POST /job/<job_id>/_stop]: Stop a job * {ref}/rollup-stop-job.html[POST /_xpack/rollup/job/<job_id>/_stop]: Stop a job
* {ref}/rollup-delete-job.html[DELETE /job/<job_id+++>+++]: Delete a job * {ref}/rollup-delete-job.html[DELETE /_xpack/rollup/job/<job_id+++>+++]: Delete a job
[float] [float]
[[rollup-api-data]] [[rollup-api-data]]
=== /data/ === /data/
* {ref}/rollup-get-rollup-caps.html[GET /data/<index_name+++>/_rollup_caps+++]: Get Rollup Capabilities * {ref}/rollup-get-rollup-caps.html[GET /_xpack/rollup/data/<index_pattern+++>/_rollup_caps+++]: Get Rollup Capabilities
* {ref}/rollup-get-rollup-index-caps.html[GET /<index_name+++>/_rollup/data/+++]: Get Rollup Index Capabilities
[float] [float]
[[rollup-api-index]] [[rollup-api-index]]

View File

@ -20,6 +20,7 @@ So while the cost of storing a millisecond of sensor data from ten years ago is
reading often diminishes with time. It's not useless -- it could easily contribute to a useful analysis -- but it's reduced reading often diminishes with time. It's not useless -- it could easily contribute to a useful analysis -- but it's reduced
value often leads to deletion rather than paying the fixed storage cost. value often leads to deletion rather than paying the fixed storage cost.
[float]
=== Rollup stores historical data at reduced granularity === Rollup stores historical data at reduced granularity
That's where Rollup comes into play. The Rollup functionality summarizes old, high-granularity data into a reduced That's where Rollup comes into play. The Rollup functionality summarizes old, high-granularity data into a reduced
@ -35,6 +36,7 @@ automates this process of summarizing historical data.
Details about setting up and configuring Rollup are covered in <<rollup-put-job,Create Job API>> Details about setting up and configuring Rollup are covered in <<rollup-put-job,Create Job API>>
[float]
=== Rollup uses standard query DSL === Rollup uses standard query DSL
The Rollup feature exposes a new search endpoint (`/_rollup_search` vs the standard `/_search`) which knows how to search The Rollup feature exposes a new search endpoint (`/_rollup_search` vs the standard `/_search`) which knows how to search
@ -48,6 +50,7 @@ are covered more in <<rollup-search-limitations, Rollup Search limitations>>.
But if your queries, aggregations and dashboards only use the available functionality, redirecting them to historical But if your queries, aggregations and dashboards only use the available functionality, redirecting them to historical
data is trivial. data is trivial.
[float]
=== Rollup merges "live" and "rolled" data === Rollup merges "live" and "rolled" data
A useful feature of Rollup is the ability to query both "live", realtime data in addition to historical "rolled" data A useful feature of Rollup is the ability to query both "live", realtime data in addition to historical "rolled" data
@ -61,6 +64,7 @@ would only see data older than a month. The RollupSearch endpoint, however, sup
It will take the results from both data sources and merge them together. If there is overlap between the "live" and It will take the results from both data sources and merge them together. If there is overlap between the "live" and
"rolled" data, live data is preferred to increase accuracy. "rolled" data, live data is preferred to increase accuracy.
[float]
=== Rollup is multi-interval aware === Rollup is multi-interval aware
Finally, Rollup is capable of intelligently utilizing the best interval available. If you've worked with summarizing Finally, Rollup is capable of intelligently utilizing the best interval available. If you've worked with summarizing

View File

@ -130,7 +130,7 @@ manage roles, log in to {kib} and go to *Management / Elasticsearch / Roles*.
The _Role Management APIs_ enable you to add, update, remove and retrieve roles The _Role Management APIs_ enable you to add, update, remove and retrieve roles
dynamically. When you use the APIs to manage roles in the `native` realm, the dynamically. When you use the APIs to manage roles in the `native` realm, the
roles are stored in an internal {es} index. For more information and examples, roles are stored in an internal {es} index. For more information and examples,
see {ref}/security-api-roles.html[Role Management APIs]. see {ref}/security-api.html#security-api-roles[role management APIs].
[float] [float]
[[roles-management-file]] [[roles-management-file]]

View File

@ -18,7 +18,7 @@ the API, and other roles that are mapped through files.
When you use role-mappings, you assign existing roles to users. When you use role-mappings, you assign existing roles to users.
The available roles should either be added using the The available roles should either be added using the
{ref}/security-api-roles.html[Role Management APIs] or defined in the {ref}/security-api.html#security-api-roles[role management APIs] or defined in the
<<roles-management-file, roles file>>. Either role-mapping method can use <<roles-management-file, roles file>>. Either role-mapping method can use
either role management method. For example, when you use the role mapping API, either role management method. For example, when you use the role mapping API,
you are able to map users to both API-managed roles and file-managed roles you are able to map users to both API-managed roles and file-managed roles

View File

@ -258,7 +258,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
} }
public boolean isAutodetectProcessUpdate() { public boolean isAutodetectProcessUpdate() {
return modelPlotConfig != null || detectorUpdates != null; return modelPlotConfig != null || detectorUpdates != null || groups != null;
} }
@Override @Override

View File

@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivileg
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
@ -34,32 +33,6 @@ public final class PutPrivilegesRequestBuilder extends ActionRequestBuilder<PutP
super(client, action, new PutPrivilegesRequest()); super(client, action, new PutPrivilegesRequest());
} }
/**
* Populate the put privileges request using the given source, application name and privilege name
* The source must contain a single privilege object which matches the application and privilege names.
*/
public PutPrivilegesRequestBuilder source(String applicationName, String expectedName,
BytesReference source, XContentType xContentType)
throws IOException {
Objects.requireNonNull(xContentType);
// EMPTY is ok here because we never call namedObject
try (InputStream stream = source.streamInput();
XContentParser parser = xContentType.xContent()
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
XContentParser.Token token = parser.currentToken();
if (token == null) {
token = parser.nextToken();
}
if (token == XContentParser.Token.START_OBJECT) {
final ApplicationPrivilegeDescriptor privilege = parsePrivilege(parser, applicationName, expectedName);
this.request.setPrivileges(Collections.singleton(privilege));
} else {
throw new ElasticsearchParseException("expected an object but found {} instead", token);
}
}
return this;
}
ApplicationPrivilegeDescriptor parsePrivilege(XContentParser parser, String applicationName, String privilegeName) throws IOException { ApplicationPrivilegeDescriptor parsePrivilege(XContentParser parser, String applicationName, String privilegeName) throws IOException {
ApplicationPrivilegeDescriptor privilege = ApplicationPrivilegeDescriptor.parse(parser, applicationName, privilegeName, false); ApplicationPrivilegeDescriptor privilege = ApplicationPrivilegeDescriptor.parse(parser, applicationName, privilegeName, false);
checkPrivilegeName(privilege, applicationName, privilegeName); checkPrivilegeName(privilege, applicationName, privilegeName);

View File

@ -292,12 +292,6 @@ public class SecurityClient {
return new GetPrivilegesRequestBuilder(client, GetPrivilegesAction.INSTANCE).application(applicationName).privileges(privileges); return new GetPrivilegesRequestBuilder(client, GetPrivilegesAction.INSTANCE).application(applicationName).privileges(privileges);
} }
public PutPrivilegesRequestBuilder preparePutPrivilege(String applicationName, String privilegeName,
BytesReference bytesReference, XContentType xContentType) throws IOException {
return new PutPrivilegesRequestBuilder(client, PutPrivilegesAction.INSTANCE)
.source(applicationName, privilegeName, bytesReference, xContentType);
}
public PutPrivilegesRequestBuilder preparePutPrivileges(BytesReference bytesReference, XContentType xContentType) throws IOException { public PutPrivilegesRequestBuilder preparePutPrivileges(BytesReference bytesReference, XContentType xContentType) throws IOException {
return new PutPrivilegesRequestBuilder(client, PutPrivilegesAction.INSTANCE).source(bytesReference, xContentType); return new PutPrivilegesRequestBuilder(client, PutPrivilegesAction.INSTANCE).source(bytesReference, xContentType);
} }

View File

@ -274,6 +274,8 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
assertTrue(update.isAutodetectProcessUpdate()); assertTrue(update.isAutodetectProcessUpdate());
update = new JobUpdate.Builder("foo").setDetectorUpdates(Collections.singletonList(mock(JobUpdate.DetectorUpdate.class))).build(); update = new JobUpdate.Builder("foo").setDetectorUpdates(Collections.singletonList(mock(JobUpdate.DetectorUpdate.class))).build();
assertTrue(update.isAutodetectProcessUpdate()); assertTrue(update.isAutodetectProcessUpdate());
update = new JobUpdate.Builder("foo").setGroups(Arrays.asList("bar")).build();
assertTrue(update.isAutodetectProcessUpdate());
} }
public void testUpdateAnalysisLimitWithValueGreaterThanMax() { public void testUpdateAnalysisLimitWithValueGreaterThanMax() {

View File

@ -66,6 +66,7 @@ public final class UpdateParams {
return new Builder(jobUpdate.getJobId()) return new Builder(jobUpdate.getJobId())
.modelPlotConfig(jobUpdate.getModelPlotConfig()) .modelPlotConfig(jobUpdate.getModelPlotConfig())
.detectorUpdates(jobUpdate.getDetectorUpdates()) .detectorUpdates(jobUpdate.getDetectorUpdates())
.updateScheduledEvents(jobUpdate.getGroups() != null)
.build(); .build();
} }

View File

@ -0,0 +1,45 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.job.process.autodetect;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.job.config.DetectionRule;
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig;
import org.elasticsearch.xpack.core.ml.job.config.Operator;
import org.elasticsearch.xpack.core.ml.job.config.RuleCondition;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class UpdateParamsTests extends ESTestCase {
public void testFromJobUpdate() {
String jobId = "foo";
DetectionRule rule = new DetectionRule.Builder(Arrays.asList(
new RuleCondition(RuleCondition.AppliesTo.ACTUAL,
Operator.GT, 1.0))).build();
List<DetectionRule> rules = Arrays.asList(rule);
List<JobUpdate.DetectorUpdate> detectorUpdates = Collections.singletonList(
new JobUpdate.DetectorUpdate(2, null, rules));
JobUpdate.Builder updateBuilder = new JobUpdate.Builder(jobId)
.setModelPlotConfig(new ModelPlotConfig())
.setDetectorUpdates(detectorUpdates);
UpdateParams params = UpdateParams.fromJobUpdate(updateBuilder.build());
assertFalse(params.isUpdateScheduledEvents());
assertEquals(params.getDetectorUpdates(), updateBuilder.build().getDetectorUpdates());
assertEquals(params.getModelPlotConfig(), updateBuilder.build().getModelPlotConfig());
params = UpdateParams.fromJobUpdate(updateBuilder.setGroups(Arrays.asList("bar")).build());
assertTrue(params.isUpdateScheduledEvents());
}
}

View File

@ -191,7 +191,6 @@ import org.elasticsearch.xpack.security.rest.action.oauth2.RestGetTokenAction;
import org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction; import org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction;
import org.elasticsearch.xpack.security.rest.action.privilege.RestDeletePrivilegesAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestDeletePrivilegesAction;
import org.elasticsearch.xpack.security.rest.action.privilege.RestGetPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestGetPrivilegesAction;
import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegeAction;
import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegesAction;
import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction; import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction;
import org.elasticsearch.xpack.security.rest.action.role.RestClearRolesCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestClearRolesCacheAction;
@ -302,7 +301,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
new TLSLicenseBootstrapCheck(), new TLSLicenseBootstrapCheck(),
new FIPS140SecureSettingsBootstrapCheck(settings, env), new FIPS140SecureSettingsBootstrapCheck(settings, env),
new FIPS140JKSKeystoreBootstrapCheck(settings), new FIPS140JKSKeystoreBootstrapCheck(settings),
new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings))); new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings),
new FIPS140LicenseBootstrapCheck(XPackSettings.FIPS_MODE_ENABLED.get(settings))));
checks.addAll(InternalRealms.getBootstrapChecks(settings, env)); checks.addAll(InternalRealms.getBootstrapChecks(settings, env));
this.bootstrapChecks = Collections.unmodifiableList(checks); this.bootstrapChecks = Collections.unmodifiableList(checks);
Automatons.updateMaxDeterminizedStates(settings); Automatons.updateMaxDeterminizedStates(settings);
@ -762,7 +762,6 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
new RestSamlInvalidateSessionAction(settings, restController, getLicenseState()), new RestSamlInvalidateSessionAction(settings, restController, getLicenseState()),
new RestGetPrivilegesAction(settings, restController, getLicenseState()), new RestGetPrivilegesAction(settings, restController, getLicenseState()),
new RestPutPrivilegesAction(settings, restController, getLicenseState()), new RestPutPrivilegesAction(settings, restController, getLicenseState()),
new RestPutPrivilegeAction(settings, restController, getLicenseState()),
new RestDeletePrivilegesAction(settings, restController, getLicenseState()) new RestDeletePrivilegesAction(settings, restController, getLicenseState())
); );
} }

View File

@ -1,49 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.rest.action.privilege;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequestBuilder;
import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
import org.elasticsearch.xpack.core.security.client.SecurityClient;
import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
/**
* Rest endpoint to add one or more {@link ApplicationPrivilege} objects to the security index
*/
public class RestPutPrivilegeAction extends SecurityBaseRestHandler {
public RestPutPrivilegeAction(Settings settings, RestController controller, XPackLicenseState licenseState) {
super(settings, licenseState);
controller.registerHandler(PUT, "/_xpack/security/privilege/{application}/{privilege}", this);
controller.registerHandler(POST, "/_xpack/security/privilege/{application}/{privilege}", this);
}
@Override
public String getName() {
return "xpack_security_put_privilege_action";
}
@Override
public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException {
final String application = request.param("application");
final String privilege = request.param("privilege");
PutPrivilegesRequestBuilder requestBuilder = new SecurityClient(client)
.preparePutPrivilege(application, privilege, request.requiredContent(), request.getXContentType())
.setRefreshPolicy(request.param("refresh"));
return RestPutPrivilegesAction.execute(requestBuilder);
}
}

View File

@ -29,6 +29,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
/** /**
* Rest endpoint to add one or more {@link ApplicationPrivilege} objects to the security index * Rest endpoint to add one or more {@link ApplicationPrivilege} objects to the security index
@ -37,6 +38,7 @@ public class RestPutPrivilegesAction extends SecurityBaseRestHandler {
public RestPutPrivilegesAction(Settings settings, RestController controller, XPackLicenseState licenseState) { public RestPutPrivilegesAction(Settings settings, RestController controller, XPackLicenseState licenseState) {
super(settings, licenseState); super(settings, licenseState);
controller.registerHandler(PUT, "/_xpack/security/privilege/", this);
controller.registerHandler(POST, "/_xpack/security/privilege/", this); controller.registerHandler(POST, "/_xpack/security/privilege/", this);
} }

View File

@ -52,36 +52,6 @@ public class PutPrivilegesRequestBuilderTests extends ESTestCase {
return new ApplicationPrivilegeDescriptor(app, name, Sets.newHashSet(actions), Collections.emptyMap()); return new ApplicationPrivilegeDescriptor(app, name, Sets.newHashSet(actions), Collections.emptyMap());
} }
public void testBuildRequestFromJsonObject() throws Exception {
final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
builder.source("foo", "read", new BytesArray(
"{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] }"
), XContentType.JSON);
final List<ApplicationPrivilegeDescriptor> privileges = builder.request().getPrivileges();
assertThat(privileges, iterableWithSize(1));
assertThat(privileges, contains(descriptor("foo", "read", "data:/read/*", "admin:/read/*")));
}
public void testPrivilegeNameValidationOfSingleElement() throws Exception {
final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
builder.source("foo", "write", new BytesArray(
"{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] }"
), XContentType.JSON));
assertThat(exception.getMessage(), containsString("write"));
assertThat(exception.getMessage(), containsString("read"));
}
public void testApplicationNameValidationOfSingleElement() throws Exception {
final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
builder.source("bar", "read", new BytesArray(
"{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] }"
), XContentType.JSON));
assertThat(exception.getMessage(), containsString("foo"));
assertThat(exception.getMessage(), containsString("bar"));
}
public void testPrivilegeNameValidationOfMultipleElement() throws Exception { public void testPrivilegeNameValidationOfMultipleElement() throws Exception {
final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE); final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->

View File

@ -1,6 +1,6 @@
{ {
"xpack.security.clear_cached_roles": { "xpack.security.clear_cached_roles": {
"documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-clear-role-cache", "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-role-cache.html",
"methods": [ "POST" ], "methods": [ "POST" ],
"url": { "url": {
"path": "/_xpack/security/role/{name}/_clear_cache", "path": "/_xpack/security/role/{name}/_clear_cache",

View File

@ -1,6 +1,6 @@
{ {
"xpack.security.delete_role": { "xpack.security.delete_role": {
"documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-delete-role", "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-role.html",
"methods": [ "DELETE" ], "methods": [ "DELETE" ],
"url": { "url": {
"path": "/_xpack/security/role/{name}", "path": "/_xpack/security/role/{name}",

View File

@ -1,6 +1,6 @@
{ {
"xpack.security.get_role": { "xpack.security.get_role": {
"documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-get-role", "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html",
"methods": [ "GET" ], "methods": [ "GET" ],
"url": { "url": {
"path": "/_xpack/security/role/{name}", "path": "/_xpack/security/role/{name}",

View File

@ -1,33 +0,0 @@
{
"xpack.security.put_privilege": {
"documentation": "TODO",
"methods": [ "POST", "PUT" ],
"url": {
"path": "/_xpack/security/privilege/{application}/{name}",
"paths": [ "/_xpack/security/privilege/{application}/{name}" ],
"parts": {
"application": {
"type" : "string",
"description" : "Application name",
"required" : true
},
"name": {
"type" : "string",
"description" : "Privilege name",
"required" : true
}
},
"params": {
"refresh": {
"type" : "enum",
"options": ["true", "false", "wait_for"],
"description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes."
}
}
},
"body": {
"description" : "The privilege to add",
"required" : true
}
}
}

View File

@ -1,7 +1,7 @@
{ {
"xpack.security.put_privileges": { "xpack.security.put_privileges": {
"documentation": "TODO", "documentation": "TODO",
"methods": [ "POST" ], "methods": [ "PUT", "POST" ],
"url": { "url": {
"path": "/_xpack/security/privilege/", "path": "/_xpack/security/privilege/",
"paths": [ "paths": [

View File

@ -1,6 +1,6 @@
{ {
"xpack.security.put_role": { "xpack.security.put_role": {
"documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-put-role", "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role.html",
"methods": [ "PUT", "POST" ], "methods": [ "PUT", "POST" ],
"url": { "url": {
"path": "/_xpack/security/role/{name}", "path": "/_xpack/security/role/{name}",

View File

@ -30,24 +30,26 @@ teardown:
ignore: 404 ignore: 404
--- ---
"Test put and get privileges": "Test put and get privileges":
# Single privilege, with names in URL # Single privilege
- do: - do:
xpack.security.put_privilege: xpack.security.put_privileges:
application: app
name: p1
body: > body: >
{ {
"application": "app", "app": {
"name": "p1", "p1": {
"actions": [ "data:read/*" , "action:login" ], "application": "app",
"metadata": { "name": "p1",
"key1" : "val1a", "actions": [ "data:read/*" , "action:login" ],
"key2" : "val2a" "metadata": {
"key1" : "val1a",
"key2" : "val2a"
}
}
} }
} }
- match: { "app.p1" : { created: true } } - match: { "app.p1" : { created: true } }
# Multiple privileges, no names in URL # Multiple privileges
- do: - do:
xpack.security.put_privileges: xpack.security.put_privileges:
body: > body: >
@ -84,18 +86,18 @@ teardown:
- match: { "app.p3" : { created: true } } - match: { "app.p3" : { created: true } }
- match: { "app2.p1" : { created: true } } - match: { "app2.p1" : { created: true } }
# Update existing privilege, with names in URL # Update existing privilege
- do: - do:
xpack.security.put_privilege: xpack.security.put_privileges:
application: app
name: p1
body: > body: >
{ {
"application": "app", "app": {
"name": "p1", "p1": {
"actions": [ "data:read/*" , "action:login" ], "actions": [ "data:read/*" , "action:login" ],
"metadata": { "metadata": {
"key3" : "val3" "key3" : "val3"
}
}
} }
} }
- match: { "app.p1" : { created: false } } - match: { "app.p1" : { created: false } }

View File

@ -31,21 +31,25 @@ setup:
} }
- do: - do:
xpack.security.put_privilege: xpack.security.put_privileges:
application: app-allow
name: read
body: > body: >
{ {
"actions": [ "data:read/*" ] "app-allow": {
"read": {
"actions": [ "data:read/*" ]
}
}
} }
- do: - do:
xpack.security.put_privilege: xpack.security.put_privileges:
application: app_deny
name: read
body: > body: >
{ {
"actions": [ "data:read/*" ] "app-deny": {
"read": {
"actions": [ "data:read/*" ]
}
}
} }
--- ---
@ -82,12 +86,14 @@ teardown:
- do: - do:
headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
xpack.security.put_privilege: xpack.security.put_privileges:
application: app
name: read
body: > body: >
{ {
"actions": [ "data:read/*" ] "app": {
"read": {
"actions": [ "data:read/*" ]
}
}
} }
- match: { "app.read" : { created: true } } - match: { "app.read" : { created: true } }
@ -112,12 +118,14 @@ teardown:
"Test put application privileges when not allowed": "Test put application privileges when not allowed":
- do: - do:
headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
xpack.security.put_privilege: xpack.security.put_privileges:
application: app_deny
name: write
body: > body: >
{ {
"actions": [ "data:write/*" ] "app_deny": {
"write": {
"actions": [ "data:write/*" ]
}
}
} }
catch: forbidden catch: forbidden

View File

@ -300,6 +300,10 @@ public class AnalysisConfig implements ToXContentObject {
multivariateByFields); multivariateByFields);
} }
public static Builder builder(List<Detector> detectors) {
return new Builder(detectors);
}
public static class Builder { public static class Builder {
private List<Detector> detectors; private List<Detector> detectors;

View File

@ -265,6 +265,10 @@ public class Detector implements ToXContentObject {
excludeFrequent, rules, detectorIndex); excludeFrequent, rules, detectorIndex);
} }
public static Builder builder() {
return new Builder();
}
public static class Builder { public static class Builder {
private String detectorDescription; private String detectorDescription;

View File

@ -412,6 +412,10 @@ public class Job implements ToXContentObject {
return Strings.toString(this); return Strings.toString(this);
} }
public static Builder builder(String id) {
return new Builder(id);
}
public static class Builder { public static class Builder {
private String id; private String id;
@ -435,7 +439,7 @@ public class Job implements ToXContentObject {
private String resultsIndexName; private String resultsIndexName;
private boolean deleted; private boolean deleted;
public Builder() { private Builder() {
} }
public Builder(String id) { public Builder(String id) {
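Each of these classes now exposes a static factory for its builder, and Job's no-argument Builder constructor becomes private so a job id is always supplied up front. A minimal sketch of the new call shapes; only the three factory methods are confirmed by this diff, and the two-argument Detector.Builder constructor is assumed to mirror the one used in ScheduledEventsIT below:

Detector.Builder detectorBuilder = Detector.builder();        // was: new Detector.Builder()
Job.Builder jobBuilder = Job.builder("my-job");                // was: new Job.Builder("my-job"); Builder() is now private
AnalysisConfig.Builder analysisConfig =                        // was: new AnalysisConfig.Builder(detectors)
        AnalysisConfig.builder(Collections.singletonList(new Detector.Builder("count", null).build()));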

View File

@ -1,8 +0,0 @@
import org.elasticsearch.gradle.test.RestIntegTestTask
// Skip test on FIPS FIXME https://github.com/elastic/elasticsearch/issues/32737
if (project.inFipsJvm) {
tasks.withType(RestIntegTestTask) {
enabled = false
}
}

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.integration;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction;
import org.elasticsearch.xpack.core.ml.action.GetRecordsAction;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Detector;
@ -36,7 +37,11 @@ public class BasicRenormalizationIT extends MlNativeAutodetectIntegTestCase {
String jobId = "basic-renormalization-it-test-default-renormalization-job"; String jobId = "basic-renormalization-it-test-default-renormalization-job";
createAndRunJob(jobId, null); createAndRunJob(jobId, null);
List<AnomalyRecord> records = getRecords(jobId); GetRecordsAction.Request getRecordsRequest = new GetRecordsAction.Request(jobId);
// Set the record score threshold to 10.0 to avoid low-score records caused by the multi-bucket trailing effect
getRecordsRequest.setRecordScore(10.0);
List<AnomalyRecord> records = getRecords(getRecordsRequest);
assertThat(records.size(), equalTo(2)); assertThat(records.size(), equalTo(2));
AnomalyRecord laterRecord = records.get(0); AnomalyRecord laterRecord = records.get(0);
assertThat(laterRecord.getActual().get(0), equalTo(100.0)); assertThat(laterRecord.getActual().get(0), equalTo(100.0));
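The explicit record-score filter keeps this assertion stable now that multi-bucket analysis can emit trailing low-score records, and the same request-level filter can be reused in other native integration tests. A small sketch using only the API shown above (getRecords(...) is the existing helper in this test class):

GetRecordsAction.Request request = new GetRecordsAction.Request(jobId);
request.setRecordScore(10.0);                  // drop low-score records from the multi-bucket trailing effect
List<AnomalyRecord> records = getRecords(request);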

View File

@ -12,11 +12,13 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; import org.elasticsearch.xpack.core.ml.action.GetBucketsAction;
import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction;
import org.elasticsearch.xpack.core.ml.action.UpdateJobAction;
import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.Bucket;
import org.junit.After; import org.junit.After;
@ -193,9 +195,9 @@ public class ScheduledEventsIT extends MlNativeAutodetectIntegTestCase {
/** /**
* Test an open job picks up changes to scheduled events/calendars * Test an open job picks up changes to scheduled events/calendars
*/ */
public void testOnlineUpdate() throws Exception { public void testAddEventsToOpenJob() throws Exception {
TimeValue bucketSpan = TimeValue.timeValueMinutes(30); TimeValue bucketSpan = TimeValue.timeValueMinutes(30);
Job.Builder job = createJob("scheduled-events-online-update", bucketSpan); Job.Builder job = createJob("scheduled-events-add-events-to-open-job", bucketSpan);
long startTime = 1514764800000L; long startTime = 1514764800000L;
final int bucketCount = 5; final int bucketCount = 5;
@ -209,7 +211,7 @@ public class ScheduledEventsIT extends MlNativeAutodetectIntegTestCase {
// Now create a calendar and events for the job while it is open // Now create a calendar and events for the job while it is open
String calendarId = "test-calendar-online-update"; String calendarId = "test-calendar-online-update";
putCalendar(calendarId, Collections.singletonList(job.getId()), "testOnlineUpdate calendar"); putCalendar(calendarId, Collections.singletonList(job.getId()), "testAddEventsToOpenJob calendar");
List<ScheduledEvent> events = new ArrayList<>(); List<ScheduledEvent> events = new ArrayList<>();
long eventStartTime = startTime + (bucketCount + 1) * bucketSpan.millis(); long eventStartTime = startTime + (bucketCount + 1) * bucketSpan.millis();
@ -257,6 +259,81 @@ public class ScheduledEventsIT extends MlNativeAutodetectIntegTestCase {
assertEquals(0, buckets.get(8).getScheduledEvents().size()); assertEquals(0, buckets.get(8).getScheduledEvents().size());
} }
/**
* An open job that is later added to a calendar should take the scheduled events into account
*/
public void testAddOpenedJobToGroupWithCalendar() throws Exception {
TimeValue bucketSpan = TimeValue.timeValueMinutes(30);
String groupName = "opened-calendar-job-group";
Job.Builder job = createJob("scheduled-events-add-opened-job-to-group-with-calendar", bucketSpan);
long startTime = 1514764800000L;
final int bucketCount = 5;
// Open the job
openJob(job.getId());
// write some buckets of data
postData(job.getId(), generateData(startTime, bucketSpan, bucketCount, bucketIndex -> randomIntBetween(100, 200))
.stream().collect(Collectors.joining()));
String calendarId = "test-calendar-open-job-update";
// Create a new calendar referencing groupName
putCalendar(calendarId, Collections.singletonList(groupName), "testAddOpenedJobToGroupWithCalendar calendar");
// Put events in the calendar
List<ScheduledEvent> events = new ArrayList<>();
long eventStartTime = startTime + (bucketCount + 1) * bucketSpan.millis();
long eventEndTime = eventStartTime + (long)(1.5 * bucketSpan.millis());
events.add(new ScheduledEvent.Builder().description("Some Event")
.startTime(ZonedDateTime.ofInstant(Instant.ofEpochMilli(eventStartTime), ZoneOffset.UTC))
.endTime(ZonedDateTime.ofInstant(Instant.ofEpochMilli(eventEndTime), ZoneOffset.UTC))
.calendarId(calendarId).build());
postScheduledEvents(calendarId, events);
// Update the job to be a member of the group
UpdateJobAction.Request jobUpdateRequest = new UpdateJobAction.Request(job.getId(),
new JobUpdate.Builder(job.getId()).setGroups(Collections.singletonList(groupName)).build());
client().execute(UpdateJobAction.INSTANCE, jobUpdateRequest).actionGet();
// Wait until the notification that the job was updated is indexed
assertBusy(() -> {
SearchResponse searchResponse = client().prepareSearch(".ml-notifications")
.setSize(1)
.addSort("timestamp", SortOrder.DESC)
.setQuery(QueryBuilders.boolQuery()
.filter(QueryBuilders.termQuery("job_id", job.getId()))
.filter(QueryBuilders.termQuery("level", "info"))
).get();
SearchHit[] hits = searchResponse.getHits().getHits();
assertThat(hits.length, equalTo(1));
assertThat(hits[0].getSourceAsMap().get("message"), equalTo("Job updated: [groups]"));
});
// write some more buckets of data that cover the scheduled event period
postData(job.getId(), generateData(startTime + bucketCount * bucketSpan.millis(), bucketSpan, 5,
bucketIndex -> randomIntBetween(100, 200))
.stream().collect(Collectors.joining()));
// and close
closeJob(job.getId());
GetBucketsAction.Request getBucketsRequest = new GetBucketsAction.Request(job.getId());
List<Bucket> buckets = getBuckets(getBucketsRequest);
// the first 6 buckets have no events
for (int i=0; i<=bucketCount; i++) {
assertEquals(0, buckets.get(i).getScheduledEvents().size());
}
// 7th and 8th buckets have the event but the last one does not
assertEquals(1, buckets.get(6).getScheduledEvents().size());
assertEquals("Some Event", buckets.get(6).getScheduledEvents().get(0));
assertEquals(1, buckets.get(7).getScheduledEvents().size());
assertEquals("Some Event", buckets.get(7).getScheduledEvents().get(0));
assertEquals(0, buckets.get(8).getScheduledEvents().size());
}
private Job.Builder createJob(String jobId, TimeValue bucketSpan) { private Job.Builder createJob(String jobId, TimeValue bucketSpan) {
Detector.Builder detector = new Detector.Builder("count", null); Detector.Builder detector = new Detector.Builder("count", null);
AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build())); AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));

View File

@ -240,6 +240,7 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
} }
} }
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32966")
public void testHRDSplit() throws Exception { public void testHRDSplit() throws Exception {
// Create job // Create job

View File

@ -1,10 +1 @@
import org.elasticsearch.gradle.test.RestIntegTestTask
// Skip test on FIPS FIXME https://github.com/elastic/elasticsearch/issues/32737
if (project.inFipsJvm) {
tasks.withType(RestIntegTestTask) {
enabled = false
}
}
group = "${group}.x-pack.qa.rolling-upgrade.with-system-key" group = "${group}.x-pack.qa.rolling-upgrade.with-system-key"