Merge remote-tracking branch 'origin/master' into index-lifecycle

commit 7ff11b4ae1
BuildPlugin.groovy:

@@ -825,9 +825,6 @@ class BuildPlugin implements Plugin<Project> {
             }
         }

-        // TODO: remove this once joda time is removed from scripting in 7.0
-        systemProperty 'es.scripting.use_java_time', 'true'
-
         // TODO: remove this once ctx isn't added to update script params in 7.0
         systemProperty 'es.scripting.update.ctx_in_params', 'false'

JarHellTask.groovy (file deleted):

@@ -1,67 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.gradle.precommit
-
-import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
-import org.elasticsearch.gradle.LoggedExec
-import org.gradle.api.file.FileCollection
-import org.gradle.api.tasks.Classpath
-import org.gradle.api.tasks.OutputFile
-/**
- * Runs CheckJarHell on a classpath.
- */
-public class JarHellTask extends LoggedExec {
-
-    /**
-     * We use a simple "marker" file that we touch when the task succeeds
-     * as the task output. This is compared against the modified time of the
-     * inputs (ie the jars/class files).
-     */
-    @OutputFile
-    File successMarker
-
-    @Classpath
-    FileCollection classpath
-
-    public JarHellTask() {
-        successMarker = new File(project.buildDir, 'markers/jarHell-' + getName())
-        project.afterEvaluate {
-            FileCollection classpath = project.sourceSets.test.runtimeClasspath
-            if (project.plugins.hasPlugin(ShadowPlugin)) {
-                classpath += project.configurations.bundle
-            }
-            inputs.files(classpath)
-            dependsOn(classpath)
-            description = "Runs CheckJarHell on ${classpath}"
-            executable = new File(project.runtimeJavaHome, 'bin/java')
-            doFirst({
-                /* JarHell doesn't like getting directories that don't exist but
-                  gradle isn't especially careful about that. So we have to do it
-                  filter it ourselves. */
-                FileCollection taskClasspath = classpath.filter { it.exists() }
-                args('-cp', taskClasspath.asPath, 'org.elasticsearch.bootstrap.JarHell')
-            })
-            doLast({
-                successMarker.parentFile.mkdirs()
-                successMarker.setText("", 'UTF-8')
-            })
-        }
-    }
-}
LoggerUsageTask.groovy (file deleted):

@@ -1,108 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.gradle.precommit
-
-import org.elasticsearch.gradle.LoggedExec
-import org.gradle.api.file.FileCollection
-import org.gradle.api.tasks.InputFiles
-import org.gradle.api.tasks.OutputFile
-
-/**
- * Runs LoggerUsageCheck on a set of directories.
- */
-public class LoggerUsageTask extends LoggedExec {
-
-    /**
-     * We use a simple "marker" file that we touch when the task succeeds
-     * as the task output. This is compared against the modified time of the
-     * inputs (ie the jars/class files).
-     */
-    private File successMarker = new File(project.buildDir, 'markers/loggerUsage')
-
-    private FileCollection classpath;
-
-    private FileCollection classDirectories;
-
-    public LoggerUsageTask() {
-        project.afterEvaluate {
-            dependsOn(classpath)
-            description = "Runs LoggerUsageCheck on ${classDirectories}"
-            executable = new File(project.runtimeJavaHome, 'bin/java')
-            if (classDirectories == null) {
-                // Default to main and test class files
-                List files = []
-                // But only if the source sets that will make them exist
-                if (project.sourceSets.findByName("main")) {
-                    files.addAll(project.sourceSets.main.output.classesDirs.getFiles())
-                    dependsOn project.tasks.classes
-                }
-                if (project.sourceSets.findByName("test")) {
-                    files.addAll(project.sourceSets.test.output.classesDirs.getFiles())
-                    dependsOn project.tasks.testClasses
-                }
-                /* In an extra twist, it isn't good enough that the source set
-                 * exists. Empty source sets won't make a classes directory
-                 * which will cause the check to fail. We have to filter the
-                 * empty directories out manually. This filter is done right
-                 * before the actual logger usage check giving the rest of the
-                 * build the opportunity to actually build the directory.
-                 */
-                classDirectories = project.files(files).filter { it.exists() }
-            }
-            doFirst({
-                args('-cp', getClasspath().asPath, 'org.elasticsearch.test.loggerusage.ESLoggerUsageChecker')
-                getClassDirectories().each {
-                    args it.getAbsolutePath()
-                }
-            })
-            doLast({
-                successMarker.parentFile.mkdirs()
-                successMarker.setText("", 'UTF-8')
-            })
-        }
-    }
-
-    @InputFiles
-    FileCollection getClasspath() {
-        return classpath
-    }
-
-    void setClasspath(FileCollection classpath) {
-        this.classpath = classpath
-    }
-
-    @InputFiles
-    FileCollection getClassDirectories() {
-        return classDirectories
-    }
-
-    void setClassDirectories(FileCollection classDirectories) {
-        this.classDirectories = classDirectories
-    }
-
-    @OutputFile
-    File getSuccessMarker() {
-        return successMarker
-    }
-
-    void setSuccessMarker(File successMarker) {
-        this.successMarker = successMarker
-    }
-}
PrecommitTasks.groovy:

@@ -18,10 +18,10 @@
  */
 package org.elasticsearch.gradle.precommit

+import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
 import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
 import org.gradle.api.Project
 import org.gradle.api.Task
-import org.gradle.api.artifacts.Configuration
 import org.gradle.api.plugins.JavaBasePlugin
 import org.gradle.api.plugins.quality.Checkstyle
 /**
@@ -70,19 +70,29 @@ class PrecommitTasks {
             precommitTasks.add(configureLoggerUsage(project))
         }

-        Map<String, Object> precommitOptions = [
+        // We want to get any compilation error before running the pre-commit checks.
+        project.sourceSets.all { sourceSet ->
+            precommitTasks.each { task ->
+                task.shouldRunAfter(sourceSet.getClassesTaskName())
+            }
+        }
+
+        return project.tasks.create([
             name: 'precommit',
             group: JavaBasePlugin.VERIFICATION_GROUP,
             description: 'Runs all non-test checks.',
             dependsOn: precommitTasks
-        ]
-        return project.tasks.create(precommitOptions)
+        ])
     }

     private static Task configureJarHell(Project project) {
         Task task = project.tasks.create('jarHell', JarHellTask.class)
         task.classpath = project.sourceSets.test.runtimeClasspath
+        if (project.plugins.hasPlugin(ShadowPlugin)) {
+            task.classpath += project.configurations.bundle
+        }
+        task.dependsOn(project.sourceSets.test.classesTaskName)
+        task.javaHome = project.runtimeJavaHome
         return task
     }

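Note: shouldRunAfter in the hunk above is an ordering-only constraint, which is why the new comment says the goal is merely to surface compilation errors first. A minimal sketch of the distinction against dependsOn, using Gradle's Java API (task names are illustrative):

    // shouldRunAfter() orders two tasks only when both are already scheduled;
    // dependsOn() would additionally pull the classes task into every run.
    Task jarHell = project.getTasks().getByName("jarHell");
    Task classes = project.getTasks().getByName("classes");
    jarHell.shouldRunAfter(classes);   // ordering only
    // jarHell.dependsOn(classes);     // ordering + forced execution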
@@ -201,22 +211,20 @@ class PrecommitTasks {

     private static Task configureNamingConventions(Project project) {
         if (project.sourceSets.findByName("test")) {
-            return project.tasks.create('namingConventions', NamingConventionsTask)
+            Task namingConventionsTask = project.tasks.create('namingConventions', NamingConventionsTask)
+            namingConventionsTask.javaHome = project.runtimeJavaHome
+            return namingConventionsTask
         }
         return null
     }

     private static Task configureLoggerUsage(Project project) {
-        Task loggerUsageTask = project.tasks.create('loggerUsageCheck', LoggerUsageTask.class)
-
         project.configurations.create('loggerUsagePlugin')
         project.dependencies.add('loggerUsagePlugin',
                 "org.elasticsearch.test:logger-usage:${org.elasticsearch.gradle.VersionProperties.elasticsearch}")
-
-        loggerUsageTask.configure {
+        return project.tasks.create('loggerUsageCheck', LoggerUsageTask.class) {
             classpath = project.configurations.loggerUsagePlugin
+            javaHome = project.runtimeJavaHome
         }
-
-        return loggerUsageTask
     }
 }

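Note: the rewritten configureLoggerUsage configures the task inline through Groovy's create(name, type) { ... } form. A rough equivalent through Gradle's Java API, as a sketch (reading runtimeJavaHome from an extra project property is an assumption about how the build exposes it):

    Task task = project.getTasks().create("loggerUsageCheck", LoggerUsageTask.class, t -> {
        t.setClasspath(project.getConfigurations().getByName("loggerUsagePlugin"));
        // assumption: runtimeJavaHome is exposed as an extra project property
        t.setJavaHome(project.getExtensions().getExtraProperties().get("runtimeJavaHome"));
    });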
LoggedExec.java:

@@ -1,10 +1,17 @@
 package org.elasticsearch.gradle;

+import org.gradle.api.Action;
 import org.gradle.api.GradleException;
+import org.gradle.api.Project;
 import org.gradle.api.tasks.Exec;
+import org.gradle.process.BaseExecSpec;
+import org.gradle.process.ExecResult;
+import org.gradle.process.ExecSpec;
+import org.gradle.process.JavaExecSpec;

 import java.io.ByteArrayOutputStream;
 import java.io.UnsupportedEncodingException;
+import java.util.function.Function;

 /**
  * A wrapper around gradle's Exec task to capture output and log on error.
@@ -12,9 +19,8 @@ import java.io.UnsupportedEncodingException;
 @SuppressWarnings("unchecked")
 public class LoggedExec extends Exec {

-    protected ByteArrayOutputStream output = new ByteArrayOutputStream();
-
     public LoggedExec() {
+        ByteArrayOutputStream output = new ByteArrayOutputStream();
         if (getLogger().isInfoEnabled() == false) {
             setStandardOutput(output);
             setErrorOutput(output);
@@ -41,4 +47,39 @@ public class LoggedExec extends Exec {
             );
         }
     }
+
+    public static ExecResult exec(Project project, Action<ExecSpec> action) {
+        return genericExec(project, project::exec, action);
+    }
+
+    public static ExecResult javaexec(Project project, Action<JavaExecSpec> action) {
+        return genericExec(project, project::javaexec, action);
+    }
+
+    private static <T extends BaseExecSpec> ExecResult genericExec(
+        Project project,
+        Function<Action<T>,ExecResult> function,
+        Action<T> action
+    ) {
+        if (project.getLogger().isInfoEnabled()) {
+            return function.apply(action);
+        }
+        ByteArrayOutputStream output = new ByteArrayOutputStream();
+        try {
+            return function.apply(spec -> {
+                spec.setStandardOutput(output);
+                spec.setErrorOutput(output);
+                action.execute(spec);
+            });
+        } catch (Exception e) {
+            try {
+                for (String line : output.toString("UTF-8").split("\\R")) {
+                    project.getLogger().error(line);
+                }
+            } catch (UnsupportedEncodingException ue) {
+                throw new GradleException("Failed to read exec output", ue);
+            }
+            throw e;
+        }
+    }
 }
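Note: genericExec buffers the process output and replays it only when the invocation throws, so a passing build stays quiet while a failing one still shows the full output. A minimal usage sketch (the command is illustrative):

    // Runs a process quietly; output is logged line-by-line only on failure.
    ExecResult result = LoggedExec.exec(project, spec -> {
        spec.setExecutable("java");
        spec.args("-version");
    });
    result.assertNormalExitValue();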
ForbiddenApisCliTask.java:

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.gradle.precommit;

-import org.gradle.api.DefaultTask;
+import org.elasticsearch.gradle.LoggedExec;
 import org.gradle.api.JavaVersion;
 import org.gradle.api.artifacts.Configuration;
 import org.gradle.api.file.FileCollection;
@@ -26,22 +26,18 @@ import org.gradle.api.logging.Logger;
 import org.gradle.api.logging.Logging;
 import org.gradle.api.tasks.Input;
 import org.gradle.api.tasks.InputFiles;
-import org.gradle.api.tasks.OutputFile;
 import org.gradle.api.tasks.SkipWhenEmpty;
 import org.gradle.api.tasks.SourceSet;
 import org.gradle.api.tasks.TaskAction;
 import org.gradle.process.JavaExecSpec;

 import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;

-public class ForbiddenApisCliTask extends DefaultTask {
+public class ForbiddenApisCliTask extends PrecommitTask {

     private final Logger logger = Logging.getLogger(ForbiddenApisCliTask.class);
     private FileCollection signaturesFiles;
@@ -71,14 +67,6 @@ public class ForbiddenApisCliTask extends DefaultTask {
         }
     }

-    @OutputFile
-    public File getMarkerFile() {
-        return new File(
-            new File(getProject().getBuildDir(), "precommit"),
-            getName()
-        );
-    }
-
     @InputFiles
     @SkipWhenEmpty
     public FileCollection getClassesDirs() {
@@ -152,8 +140,8 @@ public class ForbiddenApisCliTask extends DefaultTask {
     }

     @TaskAction
-    public void runForbiddenApisAndWriteMarker() throws IOException {
-        getProject().javaexec((JavaExecSpec spec) -> {
+    public void runForbiddenApisAndWriteMarker() {
+        LoggedExec.javaexec(getProject(), (JavaExecSpec spec) -> {
             spec.classpath(
                 getForbiddenAPIsConfiguration(),
                 getClassPathFromSourceSet()
@@ -184,7 +172,6 @@ public class ForbiddenApisCliTask extends DefaultTask {
                 spec.args("-d", dir)
             );
         });
-        Files.write(getMarkerFile().toPath(), Collections.emptyList());
     }

 }
JarHellTask.java (new file):

@@ -0,0 +1,68 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle.precommit;
+
+import org.elasticsearch.gradle.LoggedExec;
+import org.gradle.api.file.FileCollection;
+import org.gradle.api.tasks.Classpath;
+import org.gradle.api.tasks.Input;
+import org.gradle.api.tasks.TaskAction;
+
+/**
+ * Runs CheckJarHell on a classpath.
+ */
+public class JarHellTask extends PrecommitTask {
+
+    private FileCollection classpath;
+
+    private Object javaHome;
+
+    public JarHellTask() {
+        setDescription("Runs CheckJarHell on the configured classpath");
+    }
+
+    @TaskAction
+    public void runJarHellCheck() {
+        LoggedExec.javaexec(getProject(), spec -> {
+            spec.classpath(getClasspath());
+            spec.executable(getJavaHome() + "/bin/java");
+            spec.setMain("org.elasticsearch.bootstrap.JarHell");
+        });
+    }
+
+    @Input
+    public Object getJavaHome() {
+        return javaHome;
+    }
+
+    public void setJavaHome(Object javaHome) {
+        this.javaHome = javaHome;
+    }
+
+    @Classpath
+    public FileCollection getClasspath() {
+        return classpath.filter(file -> file.exists());
+    }
+
+    public void setClasspath(FileCollection classpath) {
+        this.classpath = classpath;
+    }
+
+}
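Note: getClasspath() now filters out entries that don't exist, replacing the doFirst filtering the deleted Groovy version needed because JarHell rejects missing directories. A minimal wiring sketch using Gradle's ProjectBuilder test fixture (paths are illustrative):

    Project project = ProjectBuilder.builder().build();
    JarHellTask task = project.getTasks().create("jarHell", JarHellTask.class);
    task.setClasspath(project.files("build/classes/java/test"));
    task.setJavaHome(System.getProperty("java.home"));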
LoggerUsageTask.java (new file):

@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle.precommit;
+
+import org.elasticsearch.gradle.LoggedExec;
+import org.gradle.api.file.FileCollection;
+import org.gradle.api.plugins.JavaPluginConvention;
+import org.gradle.api.tasks.Classpath;
+import org.gradle.api.tasks.Input;
+import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.SkipWhenEmpty;
+import org.gradle.api.tasks.TaskAction;
+
+import java.io.File;
+
+/**
+ * Runs LoggerUsageCheck on a set of directories.
+ */
+public class LoggerUsageTask extends PrecommitTask {
+
+    public LoggerUsageTask() {
+        setDescription("Runs LoggerUsageCheck on output directories of all source sets");
+        getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets().all(sourceSet -> {
+            dependsOn(sourceSet.getClassesTaskName());
+        });
+    }
+
+    @TaskAction
+    public void runLoggerUsageTask() {
+        LoggedExec.javaexec(getProject(), spec -> {
+            spec.setMain("org.elasticsearch.test.loggerusage.ESLoggerUsageChecker");
+            spec.classpath(getClasspath());
+            spec.executable(getJavaHome() + "/bin/java");
+            getClassDirectories().forEach(spec::args);
+        });
+    }
+
+    @Classpath
+    public FileCollection getClasspath() {
+        return classpath;
+    }
+
+    public void setClasspath(FileCollection classpath) {
+        this.classpath = classpath;
+    }
+
+    @InputFiles
+    @SkipWhenEmpty
+    public FileCollection getClassDirectories() {
+        return getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets().stream()
+            // Don't pick up all source sets like the java9 ones as logger-check doesn't support the class format
+            .filter(sourceSet -> sourceSet.getName().equals("main") || sourceSet.getName().equals("test"))
+            .map(sourceSet -> sourceSet.getOutput().getClassesDirs())
+            .reduce(FileCollection::plus)
+            .orElse(getProject().files())
+            .filter(File::exists);
+    }
+
+    @Input
+    public Object getJavaHome() {
+        return javaHome;
+    }
+
+    public void setJavaHome(Object javaHome) {
+        this.javaHome = javaHome;
+    }
+
+    private FileCollection classpath;
+    private Object javaHome;
+}
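Note: getClassDirectories() folds the per-source-set outputs with reduce(FileCollection::plus) and falls back to an empty collection; combined with @SkipWhenEmpty, Gradle can then skip the task with NO-SOURCE when nothing matched. The same fold in plain Java, as a self-contained illustration (directory names are stand-ins):

    import java.util.List;
    import java.util.Optional;
    import java.util.stream.Stream;

    public class ReduceFoldDemo {
        public static void main(String[] args) {
            // Stand-ins for sourceSet.getOutput().getClassesDirs().
            Optional<List<String>> merged = Stream.of(
                    List.of("build/classes/java/main"),
                    List.of("build/classes/java/test"))
                .reduce((a, b) -> Stream.concat(a.stream(), b.stream()).toList());
            // An empty stream yields Optional.empty(), mirroring orElse(project.files()).
            System.out.println(merged.orElse(List.of()));
        }
    }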
NamingConventionsTask.java:

@@ -1,24 +1,20 @@
 package org.elasticsearch.gradle.precommit;

-import groovy.lang.Closure;
 import org.elasticsearch.gradle.LoggedExec;
 import org.elasticsearch.test.NamingConventionsCheck;
 import org.gradle.api.GradleException;
-import org.gradle.api.Project;
-import org.gradle.api.Task;
 import org.gradle.api.file.FileCollection;
-import org.gradle.api.plugins.ExtraPropertiesExtension;
 import org.gradle.api.plugins.JavaPluginConvention;
+import org.gradle.api.tasks.Classpath;
 import org.gradle.api.tasks.Input;
-import org.gradle.api.tasks.OutputFile;
+import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.SkipWhenEmpty;
 import org.gradle.api.tasks.SourceSetContainer;
+import org.gradle.api.tasks.TaskAction;

 import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.util.Objects;

 /**
  * Runs NamingConventionsCheck on a classpath/directory combo to verify that
@@ -26,102 +22,83 @@ import java.util.Objects;
  * gradle. Read the Javadoc for NamingConventionsCheck to learn more.
  */
 @SuppressWarnings("unchecked")
-public class NamingConventionsTask extends LoggedExec {
+public class NamingConventionsTask extends PrecommitTask {

     public NamingConventionsTask() {
         setDescription("Tests that test classes aren't misnamed or misplaced");
-        final Project project = getProject();
+        dependsOn(getJavaSourceSets().getByName(checkForTestsInMain ? "main" : "test").getClassesTaskName());
+    }

-        SourceSetContainer sourceSets = getJavaSourceSets();
-        final FileCollection classpath;
-        try {
-            URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation();
-            if (location.getProtocol().equals("file") == false) {
-                throw new GradleException("Unexpected location for NamingConventionCheck class: "+ location);
-            }
-            classpath = project.files(
-                // This works because the class only depends on one class from junit that will be available from the
-                // tests compile classpath. It's the most straight forward way of telling Java where to find the main
-                // class.
-                location.toURI().getPath(),
-                // the tests to be loaded
-                checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(),
-                sourceSets.getByName("test").getCompileClasspath(),
-                sourceSets.getByName("test").getOutput()
+    @TaskAction
+    public void runNamingConventions() {
+        LoggedExec.javaexec(getProject(), spec -> {
+            spec.classpath(
+                getNamingConventionsCheckClassFiles(),
+                getSourceSetClassPath()
             );
+            spec.executable(getJavaHome() + "/bin/java");
+            spec.jvmArgs("-Djna.nosys=true");
+            spec.setMain(NamingConventionsCheck.class.getName());
+            spec.args("--test-class", getTestClass());
+            if (isSkipIntegTestInDisguise()) {
+                spec.args("--skip-integ-tests-in-disguise");
+            } else {
+                spec.args("--integ-test-class", getIntegTestClass());
+            }
+            if (isCheckForTestsInMain()) {
+                spec.args("--main");
+                spec.args("--");
+            } else {
+                spec.args("--");
+            }
+            spec.args(getExistingClassesDirs().getAsPath());
+        });
+    }
+
+    @Input
+    public Object getJavaHome() {
+        return javaHome;
+    }
+
+    public void setJavaHome(Object javaHome) {
+        this.javaHome = javaHome;
+    }
+
+    @Classpath
+    public FileCollection getSourceSetClassPath() {
+        SourceSetContainer sourceSets = getJavaSourceSets();
+        return getProject().files(
+            sourceSets.getByName("test").getCompileClasspath(),
+            sourceSets.getByName("test").getOutput(),
+            checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : getProject().files()
+        );
+    }
+
+    @InputFiles
+    public File getNamingConventionsCheckClassFiles() {
+        // This works because the class only depends on one class from junit that will be available from the
+        // tests compile classpath. It's the most straight forward way of telling Java where to find the main
+        // class.
+        URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation();
+        if (location.getProtocol().equals("file") == false) {
+            throw new GradleException("Unexpected location for NamingConventionCheck class: "+ location);
+        }
+        try {
+            return new File(location.toURI().getPath());
         } catch (URISyntaxException e) {
             throw new AssertionError(e);
         }
-        dependsOn(project.getTasks().matching(it -> "testCompileClasspath".equals(it.getName())));
-        getInputs().files(classpath);
-
-        setExecutable(new File(
-            Objects.requireNonNull(
-                project.getExtensions().getByType(ExtraPropertiesExtension.class).get("runtimeJavaHome")
-            ).toString(),
-            "bin/java")
-        );
-
-        if (checkForTestsInMain == false) {
-            /* This task is created by default for all subprojects with this
-             * setting and there is no point in running it if the files don't
-             * exist. */
-            onlyIf((unused) -> getExistingClassesDirs().isEmpty() == false);
-        }
-
-        /*
-         * We build the arguments in a funny afterEvaluate/doFirst closure so that we can wait for the classpath to be
-         * ready for us. Strangely neither one on their own are good enough.
-         */
-        project.afterEvaluate(new Closure<Void>(this, this) {
-            public void doCall(Project it) {
-                doFirst(unused -> {
-                    args("-Djna.nosys=true");
-                    args("-cp", classpath.getAsPath(), "org.elasticsearch.test.NamingConventionsCheck");
-                    args("--test-class", getTestClass());
-                    if (skipIntegTestInDisguise) {
-                        args("--skip-integ-tests-in-disguise");
-                    } else {
-                        args("--integ-test-class", getIntegTestClass());
-                    }
-                    if (getCheckForTestsInMain()) {
-                        args("--main");
-                        args("--");
-                    } else {
-                        args("--");
-                    }
-                    args(getExistingClassesDirs().getAsPath());
-                });
-            }
-        });
-        doLast((Task it) -> {
-            try {
-                try (FileWriter fw = new FileWriter(getSuccessMarker())) {
-                    fw.write("");
-                }
-            } catch (IOException e) {
-                throw new GradleException("io exception", e);
-            }
-        });
-    }
-
-    private SourceSetContainer getJavaSourceSets() {
-        return getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets();
     }

+    @InputFiles
+    @SkipWhenEmpty
     public FileCollection getExistingClassesDirs() {
         FileCollection classesDirs = getJavaSourceSets().getByName(checkForTestsInMain ? "main" : "test")
             .getOutput().getClassesDirs();
         return classesDirs.filter(it -> it.exists());
     }

-    public File getSuccessMarker() {
-        return successMarker;
-    }
-
-    public void setSuccessMarker(File successMarker) {
-        this.successMarker = successMarker;
-    }
-
+    @Input
     public boolean isSkipIntegTestInDisguise() {
         return skipIntegTestInDisguise;
     }
@@ -130,6 +107,7 @@ public class NamingConventionsTask extends LoggedExec {
         this.skipIntegTestInDisguise = skipIntegTestInDisguise;
     }

+    @Input
     public String getTestClass() {
         return testClass;
     }
@@ -138,6 +116,7 @@ public class NamingConventionsTask extends LoggedExec {
         this.testClass = testClass;
     }

+    @Input
     public String getIntegTestClass() {
         return integTestClass;
     }
@@ -146,10 +125,7 @@ public class NamingConventionsTask extends LoggedExec {
         this.integTestClass = integTestClass;
     }

-    public boolean getCheckForTestsInMain() {
-        return checkForTestsInMain;
-    }
-
+    @Input
     public boolean isCheckForTestsInMain() {
         return checkForTestsInMain;
     }
@@ -158,33 +134,34 @@ public class NamingConventionsTask extends LoggedExec {
         this.checkForTestsInMain = checkForTestsInMain;
     }

+    private SourceSetContainer getJavaSourceSets() {
+        return getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets();
+    }
+
     /**
-     * We use a simple "marker" file that we touch when the task succeeds
-     * as the task output. This is compared against the modified time of the
-     * inputs (ie the jars/class files).
+     * The java home to run the check with
      */
-    @OutputFile
-    private File successMarker = new File(getProject().getBuildDir(), "markers/" + this.getName());
+    private Object javaHome; // Make it an Object to allow for Groovy GString

     /**
      * Should we skip the integ tests in disguise tests? Defaults to true because only core names its
      * integ tests correctly.
      */
-    @Input
     private boolean skipIntegTestInDisguise = false;

     /**
      * Superclass for all tests.
      */
-    @Input
     private String testClass = "org.apache.lucene.util.LuceneTestCase";

     /**
      * Superclass for all integration tests.
      */
-    @Input
     private String integTestClass = "org.elasticsearch.test.ESIntegTestCase";

     /**
      * Should the test also check the main classpath for test classes instead of
      * doing the usual checks to the test classpath.
      */
-    @Input
     private boolean checkForTestsInMain = false;
 }

PrecommitTask.java (new file):

@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.gradle.precommit;
+
+import org.gradle.api.DefaultTask;
+import org.gradle.api.tasks.OutputFile;
+import org.gradle.api.tasks.TaskAction;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.StandardOpenOption;
+
+public class PrecommitTask extends DefaultTask {
+
+    @OutputFile
+    public File getSuccessMarker() {
+        return new File(getProject().getBuildDir(), "markers/" + this.getName());
+    }
+
+    @TaskAction
+    public void writeMarker() throws IOException {
+        getSuccessMarker().getParentFile().mkdirs();
+        Files.write(getSuccessMarker().toPath(), new byte[]{}, StandardOpenOption.CREATE);
+    }
+
+}
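Note: PrecommitTask centralizes the success-marker pattern the deleted Groovy tasks implemented by hand: the marker is the only @OutputFile, so Gradle's up-to-date check compares it against whatever inputs a subclass declares. Gradle executes every @TaskAction it finds in the class hierarchy, so the inherited writeMarker() also runs; the pattern relies on the subclass's check running (and failing fast) alongside it. A hypothetical subclass, for illustration only:

    // Hypothetical subclass, not part of this commit.
    public class FixupCheckTask extends PrecommitTask {

        private FileCollection sources;

        @InputFiles
        public FileCollection getSources() {
            return sources;
        }

        public void setSources(FileCollection sources) {
            this.sources = sources;
        }

        @TaskAction
        public void check() {
            // real verification goes here; on success the inherited
            // writeMarker() action refreshes the marker file
        }
    }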
MLRequestConverters.java:

@@ -35,6 +35,7 @@ import org.elasticsearch.client.ml.FlushJobRequest;
 import org.elasticsearch.client.ml.ForecastJobRequest;
 import org.elasticsearch.client.ml.GetBucketsRequest;
 import org.elasticsearch.client.ml.GetCategoriesRequest;
+import org.elasticsearch.client.ml.GetDatafeedRequest;
 import org.elasticsearch.client.ml.GetInfluencersRequest;
 import org.elasticsearch.client.ml.GetJobRequest;
 import org.elasticsearch.client.ml.GetJobStatsRequest;
@@ -197,6 +198,24 @@ final class MLRequestConverters {
         return request;
     }

+    static Request getDatafeed(GetDatafeedRequest getDatafeedRequest) {
+        String endpoint = new EndpointBuilder()
+                .addPathPartAsIs("_xpack")
+                .addPathPartAsIs("ml")
+                .addPathPartAsIs("datafeeds")
+                .addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedRequest.getDatafeedIds()))
+                .build();
+        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+
+        RequestConverters.Params params = new RequestConverters.Params(request);
+        if (getDatafeedRequest.isAllowNoDatafeeds() != null) {
+            params.putParam(GetDatafeedRequest.ALLOW_NO_DATAFEEDS.getPreferredName(),
+                    Boolean.toString(getDatafeedRequest.isAllowNoDatafeeds()));
+        }
+
+        return request;
+    }
+
     static Request deleteDatafeed(DeleteDatafeedRequest deleteDatafeedRequest) {
         String endpoint = new EndpointBuilder()
                 .addPathPartAsIs("_xpack")
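Note: a worked example of what the converter builds (IDs are illustrative; the varargs constructor and setter are assumptions based on the request class added later in this diff):

    GetDatafeedRequest request = new GetDatafeedRequest("feed-1", "feed-2");
    request.setAllowNoDatafeeds(true);
    // MLRequestConverters.getDatafeed(request) then yields:
    //   GET /_xpack/ml/datafeeds/feed-1,feed-2?allow_no_datafeeds=true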
MachineLearningClient.java:

@@ -33,6 +33,8 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
 import org.elasticsearch.client.ml.GetBucketsResponse;
 import org.elasticsearch.client.ml.GetCategoriesRequest;
 import org.elasticsearch.client.ml.GetCategoriesResponse;
+import org.elasticsearch.client.ml.GetDatafeedRequest;
+import org.elasticsearch.client.ml.GetDatafeedResponse;
 import org.elasticsearch.client.ml.GetInfluencersRequest;
 import org.elasticsearch.client.ml.GetInfluencersResponse;
 import org.elasticsearch.client.ml.GetJobRequest;
@@ -466,8 +468,8 @@ public final class MachineLearningClient {
      * For additional info
      * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-datafeed.html">ML PUT datafeed documentation</a>
      *
      * @param request The request containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} settings
      * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @param listener Listener to be notified upon request completion
      */
     public void putDatafeedAsync(PutDatafeedRequest request, RequestOptions options, ActionListener<PutDatafeedResponse> listener) {
@@ -479,6 +481,47 @@ public final class MachineLearningClient {
             Collections.emptySet());
     }

+    /**
+     * Gets one or more Machine Learning datafeed configuration info.
+     *
+     * <p>
+     * For additional info
+     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html">ML GET datafeed documentation</a>
+     *
+     * @param request {@link GetDatafeedRequest} Request containing a list of datafeedId(s) and additional options
+     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return {@link GetDatafeedResponse} response object containing
+     * the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} objects and the number of jobs found
+     * @throws IOException when there is a serialization issue sending the request or receiving the response
+     */
+    public GetDatafeedResponse getDatafeed(GetDatafeedRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request,
+                MLRequestConverters::getDatafeed,
+                options,
+                GetDatafeedResponse::fromXContent,
+                Collections.emptySet());
+    }
+
+    /**
+     * Gets one or more Machine Learning datafeed configuration info, asynchronously.
+     *
+     * <p>
+     * For additional info
+     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html">ML GET datafeed documentation</a>
+     *
+     * @param request {@link GetDatafeedRequest} Request containing a list of datafeedId(s) and additional options
+     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener Listener to be notified with {@link GetDatafeedResponse} upon request completion
+     */
+    public void getDatafeedAsync(GetDatafeedRequest request, RequestOptions options, ActionListener<GetDatafeedResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request,
+                MLRequestConverters::getDatafeed,
+                options,
+                GetDatafeedResponse::fromXContent,
+                listener,
+                Collections.emptySet());
+    }
+
     /**
      * Deletes the given Machine Learning Datafeed
      * <p>
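Note: typical call site for the new synchronous method, as a sketch (assumes a configured RestHighLevelClient named client; the response accessor name is an assumption):

    GetDatafeedRequest request = new GetDatafeedRequest("my-datafeed");
    GetDatafeedResponse response =
            client.machineLearning().getDatafeed(request, RequestOptions.DEFAULT);
    // assumption: the response exposes the matching configs, e.g. response.datafeeds()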
RestHighLevelClient.java:

@@ -220,6 +220,7 @@ public class RestHighLevelClient implements Closeable {
     private final MachineLearningClient machineLearningClient = new MachineLearningClient(this);
     private final SecurityClient securityClient = new SecurityClient(this);
     private final IndexLifecycleClient ilmClient = new IndexLifecycleClient(this);
+    private final RollupClient rollupClient = new RollupClient(this);

     /**
      * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the
@@ -301,6 +302,18 @@ public class RestHighLevelClient implements Closeable {
         return snapshotClient;
     }

+    /**
+     * Provides methods for accessing the Elastic Licensed Rollup APIs that
+     * are shipped with the default distribution of Elasticsearch. All of
+     * these APIs will 404 if run against the OSS distribution of Elasticsearch.
+     * <p>
+     * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-apis.html">
+     * Watcher APIs on elastic.co</a> for more information.
+     */
+    public RollupClient rollup() {
+        return rollupClient;
+    }
+
     /**
      * Provides a {@link TasksClient} which can be used to access the Tasks API.
      *
RollupClient.java (new file):

@@ -0,0 +1,76 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.client.rollup.PutRollupJobRequest;
+import org.elasticsearch.client.rollup.PutRollupJobResponse;
+
+import java.io.IOException;
+import java.util.Collections;
+
+/**
+ * A wrapper for the {@link RestHighLevelClient} that provides methods for
+ * accessing the Elastic Rollup-related methods
+ * <p>
+ * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-apis.html">
+ * X-Pack Rollup APIs on elastic.co</a> for more information.
+ */
+public class RollupClient {
+
+    private final RestHighLevelClient restHighLevelClient;
+
+    RollupClient(final RestHighLevelClient restHighLevelClient) {
+        this.restHighLevelClient = restHighLevelClient;
+    }
+
+    /**
+     * Put a rollup job into the cluster
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
+     * the docs</a> for more.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public PutRollupJobResponse putRollupJob(PutRollupJobRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request,
+                RollupRequestConverters::putJob,
+                options,
+                PutRollupJobResponse::fromXContent,
+                Collections.emptySet());
+    }
+
+    /**
+     * Asynchronously put a rollup job into the cluster
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
+     * the docs</a> for more.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void putRollupJobAsync(PutRollupJobRequest request, RequestOptions options, ActionListener<PutRollupJobResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request,
+                RollupRequestConverters::putJob,
+                options,
+                PutRollupJobResponse::fromXContent,
+                listener, Collections.emptySet());
+    }
+}
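Note: usage sketch for the new client (assumes a configured RestHighLevelClient named client and a previously built RollupJobConfig named config):

    PutRollupJobRequest request = new PutRollupJobRequest(config);
    PutRollupJobResponse response =
            client.rollup().putRollupJob(request, RequestOptions.DEFAULT);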
RollupRequestConverters.java (new file):

@@ -0,0 +1,45 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.rollup.PutRollupJobRequest;
+
+import java.io.IOException;
+
+import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
+import static org.elasticsearch.client.RequestConverters.createEntity;
+
+final class RollupRequestConverters {
+
+    private RollupRequestConverters() {
+    }
+
+    static Request putJob(final PutRollupJobRequest putRollupJobRequest) throws IOException {
+        String endpoint = new RequestConverters.EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+            .addPathPartAsIs("rollup")
+            .addPathPartAsIs("job")
+            .addPathPart(putRollupJobRequest.getConfig().getId())
+            .build();
+        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+        request.setEntity(createEntity(putRollupJobRequest, REQUEST_BODY_CONTENT_TYPE));
+        return request;
+    }
+}
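Note: what putJob() produces, as a sketch (the job id is illustrative; the class is package-private, so this only compiles from within org.elasticsearch.client):

    Request request = RollupRequestConverters.putJob(putRollupJobRequest);
    // For a job config whose id is "sales-rollup", the endpoint is:
    //   PUT /_xpack/rollup/job/sales-rollup
    // with the JSON-serialized request as the HTTP entity.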
ValidationException.java:

@@ -18,6 +18,8 @@
  */
 package org.elasticsearch.client;

+import org.elasticsearch.common.Nullable;
+
 import java.util.ArrayList;
 import java.util.List;

@@ -31,10 +33,23 @@ public class ValidationException extends IllegalArgumentException {
      * Add a new validation error to the accumulating validation errors
      * @param error the error to add
      */
-    public void addValidationError(String error) {
+    public void addValidationError(final String error) {
         validationErrors.add(error);
     }

+    /**
+     * Adds validation errors from an existing {@link ValidationException} to
+     * the accumulating validation errors
+     * @param exception the {@link ValidationException} to add errors from
+     */
+    public final void addValidationErrors(final @Nullable ValidationException exception) {
+        if (exception != null) {
+            for (String error : exception.validationErrors()) {
+                addValidationError(error);
+            }
+        }
+    }
+
     /**
      * Returns the validation errors accumulated
      */
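Note: the new accumulator in action, as a sketch:

    ValidationException base = new ValidationException();
    base.addValidationError("datafeed_id must not be null");

    ValidationException other = new ValidationException();
    other.addValidationError("job_id must not be null");

    base.addValidationErrors(other);   // base now carries both errors
    base.addValidationErrors(null);    // no-op, per the @Nullable parameter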
CloseJobRequest.java:

@@ -136,9 +136,9 @@ public class CloseJobRequest extends ActionRequest implements ToXContentObject {
     /**
      * Whether to ignore if a wildcard expression matches no jobs.
      *
-     * This includes `_all` string or when no jobs have been specified
+     * This includes {@code _all} string or when no jobs have been specified
      *
-     * @param allowNoJobs When {@code true} ignore if wildcard or `_all` matches no jobs. Defaults to {@code true}
+     * @param allowNoJobs When {@code true} ignore if wildcard or {@code _all} matches no jobs. Defaults to {@code true}
      */
     public void setAllowNoJobs(boolean allowNoJobs) {
         this.allowNoJobs = allowNoJobs;
@@ -109,7 +109,7 @@ public class DeleteForecastRequest extends ActionRequest implements ToXContentObject {
     }
 
     /**
-     * Sets the `allow_no_forecasts` field.
+     * Sets the value of "allow_no_forecasts".
      *
      * @param allowNoForecasts when {@code true} no error is thrown when {@link DeleteForecastRequest#ALL} does not find any forecasts
      */

@@ -0,0 +1,144 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

/**
 * Request object to get {@link DatafeedConfig} objects with the matching {@code datafeedId}s.
 *
 * {@code _all} explicitly gets all the datafeeds in the cluster
 * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds in the cluster
 */
public class GetDatafeedRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField DATAFEED_IDS = new ParseField("datafeed_ids");
    public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds");

    private static final String ALL_DATAFEEDS = "_all";
    private final List<String> datafeedIds;
    private Boolean allowNoDatafeeds;

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>(
        "get_datafeed_request",
        true, a -> new GetDatafeedRequest(a[0] == null ? new ArrayList<>() : (List<String>) a[0]));

    static {
        PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), DATAFEED_IDS);
        PARSER.declareBoolean(GetDatafeedRequest::setAllowNoDatafeeds, ALLOW_NO_DATAFEEDS);
    }

    /**
     * Helper method to create a query that will get ALL datafeeds
     * @return new {@link GetDatafeedRequest} object searching for the datafeedId "_all"
     */
    public static GetDatafeedRequest getAllDatafeedsRequest() {
        return new GetDatafeedRequest(ALL_DATAFEEDS);
    }

    /**
     * Get the specified {@link DatafeedConfig} configurations via their unique datafeedIds
     * @param datafeedIds must not contain any null values
     */
    public GetDatafeedRequest(String... datafeedIds) {
        this(Arrays.asList(datafeedIds));
    }

    GetDatafeedRequest(List<String> datafeedIds) {
        if (datafeedIds.stream().anyMatch(Objects::isNull)) {
            throw new NullPointerException("datafeedIds must not contain null values");
        }
        this.datafeedIds = new ArrayList<>(datafeedIds);
    }

    /**
     * All the datafeedIds for which to get configuration information
     */
    public List<String> getDatafeedIds() {
        return datafeedIds;
    }

    /**
     * Whether to ignore if a wildcard expression matches no datafeeds.
     *
     * @param allowNoDatafeeds If this is {@code false}, then an error is returned when a wildcard (or {@code _all})
     *                         does not match any datafeeds
     */
    public void setAllowNoDatafeeds(boolean allowNoDatafeeds) {
        this.allowNoDatafeeds = allowNoDatafeeds;
    }

    public Boolean isAllowNoDatafeeds() {
        return allowNoDatafeeds;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public int hashCode() {
        return Objects.hash(datafeedIds, allowNoDatafeeds);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || other.getClass() != getClass()) {
            return false;
        }

        GetDatafeedRequest that = (GetDatafeedRequest) other;
        return Objects.equals(datafeedIds, that.datafeedIds) &&
            Objects.equals(allowNoDatafeeds, that.allowNoDatafeeds);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();

        if (datafeedIds.isEmpty() == false) {
            builder.field(DATAFEED_IDS.getPreferredName(), datafeedIds);
        }

        if (allowNoDatafeeds != null) {
            builder.field(ALLOW_NO_DATAFEEDS.getPreferredName(), allowNoDatafeeds);
        }

        builder.endObject();
        return builder;
    }
}

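A short usage sketch for the new request class; it uses only the constructors and setters added above (the datafeed ids are illustrative, and how the request is ultimately executed is outside this diff):

    // fetch two datafeeds by id; an empty request would implicitly fetch all of them
    GetDatafeedRequest request = new GetDatafeedRequest("datafeed-1", "datafeed-2");
    request.setAllowNoDatafeeds(true); // don't error if a wildcard or _all matches nothing

    // equivalent to explicitly asking for the reserved "_all" id
    GetDatafeedRequest all = GetDatafeedRequest.getAllDatafeedsRequest();
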
@@ -0,0 +1,89 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * Contains a {@link List} of the found {@link DatafeedConfig} objects and the total count found
 */
public class GetDatafeedResponse extends AbstractResultResponse<DatafeedConfig> {

    public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetDatafeedResponse, Void> PARSER =
        new ConstructingObjectParser<>("get_datafeed_response", true,
            a -> new GetDatafeedResponse((List<DatafeedConfig.Builder>) a[0], (long) a[1]));

    static {
        PARSER.declareObjectArray(constructorArg(), DatafeedConfig.PARSER, RESULTS_FIELD);
        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
    }

    GetDatafeedResponse(List<DatafeedConfig.Builder> datafeedBuilders, long count) {
        super(RESULTS_FIELD, datafeedBuilders.stream().map(DatafeedConfig.Builder::build).collect(Collectors.toList()), count);
    }

    /**
     * The collection of {@link DatafeedConfig} objects found in the query
     */
    public List<DatafeedConfig> datafeeds() {
        return results;
    }

    public static GetDatafeedResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    @Override
    public int hashCode() {
        return Objects.hash(results, count);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        GetDatafeedResponse other = (GetDatafeedResponse) obj;
        return Objects.equals(results, other.results) && count == other.count;
    }

    @Override
    public final String toString() {
        return Strings.toString(this);
    }
}

@@ -33,11 +33,11 @@ import java.util.List;
 import java.util.Objects;
 
 /**
- * Request object to get {@link Job} objects with the matching `jobId`s or
- * `groupName`s.
+ * Request object to get {@link Job} objects with the matching {@code jobId}s or
+ * {@code groupName}s.
  *
- * `_all` explicitly gets all the jobs in the cluster
- * An empty request (no `jobId`s) implicitly gets all the jobs in the cluster
+ * {@code _all} explicitly gets all the jobs in the cluster
+ * An empty request (no {@code jobId}s) implicitly gets all the jobs in the cluster
  */
 public class GetJobRequest extends ActionRequest implements ToXContentObject {

@@ -91,7 +91,7 @@ public class GetJobRequest extends ActionRequest implements ToXContentObject {
     /**
      * Whether to ignore if a wildcard expression matches no jobs.
      *
-     * @param allowNoJobs If this is {@code false}, then an error is returned when a wildcard (or `_all`) does not match any jobs
+     * @param allowNoJobs If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) does not match any jobs
      */
     public void setAllowNoJobs(boolean allowNoJobs) {
         this.allowNoJobs = allowNoJobs;

@@ -38,8 +38,8 @@ import java.util.Objects;
 /**
  * Request object to get {@link org.elasticsearch.client.ml.job.stats.JobStats} by their respective jobIds
  *
- * `_all` explicitly gets all the jobs' statistics in the cluster
- * An empty request (no `jobId`s) implicitly gets all the jobs' statistics in the cluster
+ * {@code _all} explicitly gets all the jobs' statistics in the cluster
+ * An empty request (no {@code jobId}s) implicitly gets all the jobs' statistics in the cluster
  */
 public class GetJobStatsRequest extends ActionRequest implements ToXContentObject {

@@ -100,9 +100,9 @@ public class GetJobStatsRequest extends ActionRequest implements ToXContentObject {
     /**
      * Whether to ignore if a wildcard expression matches no jobs.
      *
-     * This includes `_all` string or when no jobs have been specified
+     * This includes {@code _all} string or when no jobs have been specified
      *
-     * @param allowNoJobs When {@code true} ignore if wildcard or `_all` matches no jobs. Defaults to {@code true}
+     * @param allowNoJobs When {@code true} ignore if wildcard or {@code _all} matches no jobs. Defaults to {@code true}
      */
     public void setAllowNoJobs(boolean allowNoJobs) {
         this.allowNoJobs = allowNoJobs;

@@ -109,7 +109,7 @@ public class GetOverallBucketsRequest extends ActionRequest implements ToXContentObject {
     }
 
     /**
-     * Sets the value of `top_n`.
+     * Sets the value of "top_n".
      * @param topN The number of top job bucket scores to be used in the overall_score calculation. Defaults to 1.
      */
     public void setTopN(Integer topN) {

@@ -121,7 +121,7 @@ public class GetOverallBucketsRequest extends ActionRequest implements ToXContentObject {
     }
 
     /**
-     * Sets the value of `bucket_span`.
+     * Sets the value of "bucket_span".
      * @param bucketSpan The span of the overall buckets. Must be greater or equal to the largest job's bucket_span.
      *                   Defaults to the largest job's bucket_span.
      */

@@ -197,7 +197,7 @@ public class GetOverallBucketsRequest extends ActionRequest implements ToXContentObject {
     /**
      * Whether to ignore if a wildcard expression matches no jobs.
      *
-     * If this is `false`, then an error is returned when a wildcard (or `_all`) does not match any jobs
+     * If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) does not match any jobs
      */
     public Boolean isAllowNoJobs() {
         return allowNoJobs;

@@ -38,7 +38,7 @@ import java.util.Map;
 import java.util.Objects;
 
 /**
- * POJO for posting data to a Machine Learning job
+ * Request to post data to a Machine Learning job
  */
 public class PostDataRequest extends ActionRequest implements ToXContentObject {

@@ -0,0 +1,65 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;
import java.util.Optional;

public class PutRollupJobRequest implements Validatable, ToXContentObject {

    private final RollupJobConfig config;

    public PutRollupJobRequest(final RollupJobConfig config) {
        this.config = Objects.requireNonNull(config, "rollup job configuration is required");
    }

    public RollupJobConfig getConfig() {
        return config;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return config.toXContent(builder, params);
    }

    @Override
    public Optional<ValidationException> validate() {
        return config.validate();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        final PutRollupJobRequest that = (PutRollupJobRequest) o;
        return Objects.equals(config, that.config);
    }

    @Override
    public int hashCode() {
        return Objects.hash(config);
    }
}

@@ -0,0 +1,80 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

public class PutRollupJobResponse implements ToXContentObject {

    private final boolean acknowledged;

    public PutRollupJobResponse(final boolean acknowledged) {
        this.acknowledged = acknowledged;
    }

    public boolean isAcknowledged() {
        return acknowledged;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final PutRollupJobResponse that = (PutRollupJobResponse) o;
        return isAcknowledged() == that.isAcknowledged();
    }

    @Override
    public int hashCode() {
        return Objects.hash(acknowledged);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            builder.field("acknowledged", isAcknowledged());
        }
        builder.endObject();
        return builder;
    }

    private static final ConstructingObjectParser<PutRollupJobResponse, Void> PARSER
        = new ConstructingObjectParser<>("put_rollup_job_response", true, args -> new PutRollupJobResponse((boolean) args[0]));
    static {
        PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged"));
    }

    public static PutRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}

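To show how the request and response types fit together, here is a hedged sketch assuming the rollup config classes added later in this commit (the job id, indices and cron expression are illustrative):

    RollupJobConfig config = new RollupJobConfig("job-1", "logs-*", "rollup-logs", "*/30 * * * * ?", 100,
        new GroupConfig(new DateHistogramGroupConfig("timestamp", DateHistogramInterval.DAY)),
        Collections.singletonList(new MetricConfig("price", Arrays.asList("min", "max", "sum"))),
        null); // a null timeout falls back to the 20 second default

    PutRollupJobRequest request = new PutRollupJobRequest(config); // a null config is rejected
    // client-side validation before sending; ValidationException is an IllegalArgumentException
    request.validate().ifPresent(e -> { throw e; });
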
@@ -0,0 +1,189 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.util.Objects;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.ObjectParser.ValueType;

/**
 * The configuration object for the date histogram in the rollup config
 *
 * {
 *     "groups": [
 *        "date_histogram": {
 *            "field" : "foo",
 *            "interval" : "1d",
 *            "delay": "30d",
 *            "time_zone" : "EST"
 *        }
 *     ]
 * }
 */
public class DateHistogramGroupConfig implements Validatable, ToXContentObject {

    static final String NAME = "date_histogram";
    private static final String INTERVAL = "interval";
    private static final String FIELD = "field";
    private static final String TIME_ZONE = "time_zone";
    private static final String DELAY = "delay";
    private static final String DEFAULT_TIMEZONE = "UTC";

    private static final ConstructingObjectParser<DateHistogramGroupConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, a ->
            new DateHistogramGroupConfig((String) a[0], (DateHistogramInterval) a[1], (DateHistogramInterval) a[2], (String) a[3]));
        PARSER.declareString(constructorArg(), new ParseField(FIELD));
        PARSER.declareField(constructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(INTERVAL), ValueType.STRING);
        PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(DELAY), ValueType.STRING);
        PARSER.declareString(optionalConstructorArg(), new ParseField(TIME_ZONE));
    }

    private final String field;
    private final DateHistogramInterval interval;
    private final DateHistogramInterval delay;
    private final String timeZone;

    /**
     * Create a new {@link DateHistogramGroupConfig} using the given field and interval parameters.
     */
    public DateHistogramGroupConfig(final String field, final DateHistogramInterval interval) {
        this(field, interval, null, null);
    }

    /**
     * Create a new {@link DateHistogramGroupConfig} using the given configuration parameters.
     * <p>
     *     The {@code field} and {@code interval} are required to compute the date histogram for the rolled up documents.
     *     The {@code delay} is optional and can be set to {@code null}. It defines how long to wait before rolling up new documents.
     *     The {@code timeZone} is optional and can be set to {@code null}. When configured, the time zone value is resolved using
     *     {@link DateTimeZone#forID(String)} and must match a time zone identifier provided by the Joda Time library.
     * </p>
     *
     * @param field the name of the date field to use for the date histogram (required)
     * @param interval the interval to use for the date histogram (required)
     * @param delay the time delay (optional)
     * @param timeZone the id of time zone to use to calculate the date histogram (optional). When {@code null}, the UTC timezone is used.
     */
    public DateHistogramGroupConfig(final String field,
                                    final DateHistogramInterval interval,
                                    final @Nullable DateHistogramInterval delay,
                                    final @Nullable String timeZone) {
        this.field = field;
        this.interval = interval;
        this.delay = delay;
        this.timeZone = (timeZone != null && timeZone.isEmpty() == false) ? timeZone : DEFAULT_TIMEZONE;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (field == null || field.isEmpty()) {
            validationException.addValidationError("Field name is required");
        }
        if (interval == null) {
            validationException.addValidationError("Interval is required");
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    /**
     * Get the date field
     */
    public String getField() {
        return field;
    }

    /**
     * Get the date interval
     */
    public DateHistogramInterval getInterval() {
        return interval;
    }

    /**
     * Get the time delay for this histogram
     */
    public DateHistogramInterval getDelay() {
        return delay;
    }

    /**
     * Get the timezone to apply
     */
    public String getTimeZone() {
        return timeZone;
    }

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        {
            builder.field(INTERVAL, interval.toString());
            builder.field(FIELD, field);
            if (delay != null) {
                builder.field(DELAY, delay.toString());
            }
            builder.field(TIME_ZONE, timeZone);
        }
        return builder.endObject();
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final DateHistogramGroupConfig that = (DateHistogramGroupConfig) other;
        return Objects.equals(interval, that.interval)
            && Objects.equals(field, that.field)
            && Objects.equals(delay, that.delay)
            && Objects.equals(timeZone, that.timeZone);
    }

    @Override
    public int hashCode() {
        return Objects.hash(interval, field, delay, timeZone);
    }

    public static DateHistogramGroupConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}

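For instance, the JSON sample in the class javadoc corresponds to this construction (the field name is illustrative):

    // field, interval, delay and time zone; delay and time zone may be null
    DateHistogramGroupConfig dateHisto = new DateHistogramGroupConfig(
        "foo", new DateHistogramInterval("1d"), new DateHistogramInterval("30d"), "EST");

    // the two-argument constructor omits both; a null or empty time zone defaults to UTC
    DateHistogramGroupConfig utc = new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"));
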
@@ -0,0 +1,171 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
 * The configuration object for the groups section in the rollup config.
 * Basically just a wrapper for histo/date histo/terms objects
 *
 * {
 *     "groups": [
 *        "date_histogram": {...},
 *        "histogram" : {...},
 *        "terms" : {...}
 *     ]
 * }
 */
public class GroupConfig implements Validatable, ToXContentObject {

    static final String NAME = "groups";
    private static final ConstructingObjectParser<GroupConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, args ->
            new GroupConfig((DateHistogramGroupConfig) args[0], (HistogramGroupConfig) args[1], (TermsGroupConfig) args[2]));
        PARSER.declareObject(constructorArg(),
            (p, c) -> DateHistogramGroupConfig.fromXContent(p), new ParseField(DateHistogramGroupConfig.NAME));
        PARSER.declareObject(optionalConstructorArg(),
            (p, c) -> HistogramGroupConfig.fromXContent(p), new ParseField(HistogramGroupConfig.NAME));
        PARSER.declareObject(optionalConstructorArg(),
            (p, c) -> TermsGroupConfig.fromXContent(p), new ParseField(TermsGroupConfig.NAME));
    }

    private final DateHistogramGroupConfig dateHistogram;
    private final @Nullable HistogramGroupConfig histogram;
    private final @Nullable TermsGroupConfig terms;

    public GroupConfig(final DateHistogramGroupConfig dateHistogram) {
        this(dateHistogram, null, null);
    }

    public GroupConfig(final DateHistogramGroupConfig dateHistogram,
                       final @Nullable HistogramGroupConfig histogram,
                       final @Nullable TermsGroupConfig terms) {
        this.dateHistogram = dateHistogram;
        this.histogram = histogram;
        this.terms = terms;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (dateHistogram != null) {
            final Optional<ValidationException> dateHistogramValidationErrors = dateHistogram.validate();
            if (dateHistogramValidationErrors != null && dateHistogramValidationErrors.isPresent()) {
                validationException.addValidationErrors(dateHistogramValidationErrors.get());
            }
        } else {
            validationException.addValidationError("Date histogram must not be null");
        }
        if (histogram != null) {
            final Optional<ValidationException> histogramValidationErrors = histogram.validate();
            if (histogramValidationErrors != null && histogramValidationErrors.isPresent()) {
                validationException.addValidationErrors(histogramValidationErrors.get());
            }
        }
        if (terms != null) {
            final Optional<ValidationException> termsValidationErrors = terms.validate();
            if (termsValidationErrors != null && termsValidationErrors.isPresent()) {
                validationException.addValidationErrors(termsValidationErrors.get());
            }
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    /**
     * @return the configuration of the date histogram
     */
    public DateHistogramGroupConfig getDateHistogram() {
        return dateHistogram;
    }

    /**
     * @return the configuration of the histogram
     */
    @Nullable
    public HistogramGroupConfig getHistogram() {
        return histogram;
    }

    /**
     * @return the configuration of the terms
     */
    @Nullable
    public TermsGroupConfig getTerms() {
        return terms;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            builder.field(DateHistogramGroupConfig.NAME, dateHistogram);
            if (histogram != null) {
                builder.field(HistogramGroupConfig.NAME, histogram);
            }
            if (terms != null) {
                builder.field(TermsGroupConfig.NAME, terms);
            }
        }
        return builder.endObject();
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final GroupConfig that = (GroupConfig) other;
        return Objects.equals(dateHistogram, that.dateHistogram)
            && Objects.equals(histogram, that.histogram)
            && Objects.equals(terms, that.terms);
    }

    @Override
    public int hashCode() {
        return Objects.hash(dateHistogram, histogram, terms);
    }

    public static GroupConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}

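A sketch composing the groups section; only the date histogram is mandatory, which validate() enforces (HistogramGroupConfig is defined in the next file of this commit, and the field names are illustrative):

    GroupConfig groups = new GroupConfig(
        new DateHistogramGroupConfig("timestamp", new DateHistogramInterval("1h")),
        new HistogramGroupConfig(5L, "price", "quantity"), // optional numeric histogram
        null);                                             // terms grouping omitted
    // no errors expected here: groups.validate() returns Optional.empty()
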
@@ -0,0 +1,127 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * The configuration object for the histograms in the rollup config
 *
 * {
 *     "groups": [
 *        "histogram": {
 *            "fields" : [ "foo", "bar" ],
 *            "interval" : 123
 *        }
 *     ]
 * }
 */
public class HistogramGroupConfig implements Validatable, ToXContentObject {

    static final String NAME = "histogram";
    private static final String INTERVAL = "interval";
    private static final String FIELDS = "fields";

    private static final ConstructingObjectParser<HistogramGroupConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
            @SuppressWarnings("unchecked") List<String> fields = (List<String>) args[1];
            return new HistogramGroupConfig((long) args[0], fields != null ? fields.toArray(new String[fields.size()]) : null);
        });
        PARSER.declareLong(constructorArg(), new ParseField(INTERVAL));
        PARSER.declareStringArray(constructorArg(), new ParseField(FIELDS));
    }

    private final long interval;
    private final String[] fields;

    public HistogramGroupConfig(final long interval, final String... fields) {
        this.interval = interval;
        this.fields = fields;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (fields == null || fields.length == 0) {
            validationException.addValidationError("Fields must have at least one value");
        }
        if (interval <= 0) {
            validationException.addValidationError("Interval must be a positive long");
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    public long getInterval() {
        return interval;
    }

    public String[] getFields() {
        return fields;
    }

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        {
            builder.field(INTERVAL, interval);
            builder.field(FIELDS, fields);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final HistogramGroupConfig that = (HistogramGroupConfig) other;
        return Objects.equals(interval, that.interval) && Arrays.equals(fields, that.fields);
    }

    @Override
    public int hashCode() {
        return Objects.hash(interval, Arrays.hashCode(fields));
    }

    public static HistogramGroupConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}

@@ -0,0 +1,135 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * The configuration object for the metrics portion of a rollup job config
 *
 * {
 *     "metrics": [
 *        {
 *            "field": "foo",
 *            "metrics": [ "min", "max", "sum"]
 *        },
 *        {
 *            "field": "bar",
 *            "metrics": [ "max" ]
 *        }
 *     ]
 * }
 */
public class MetricConfig implements Validatable, ToXContentObject {

    static final String NAME = "metrics";
    private static final String FIELD = "field";
    private static final String METRICS = "metrics";

    private static final ConstructingObjectParser<MetricConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
            @SuppressWarnings("unchecked") List<String> metrics = (List<String>) args[1];
            return new MetricConfig((String) args[0], metrics);
        });
        PARSER.declareString(constructorArg(), new ParseField(FIELD));
        PARSER.declareStringArray(constructorArg(), new ParseField(METRICS));
    }

    private final String field;
    private final List<String> metrics;

    public MetricConfig(final String field, final List<String> metrics) {
        this.field = field;
        this.metrics = metrics;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (field == null || field.isEmpty()) {
            validationException.addValidationError("Field name is required");
        }
        if (metrics == null || metrics.isEmpty()) {
            validationException.addValidationError("Metrics must be a non-null, non-empty array of strings");
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    /**
     * @return the name of the field used in the metric configuration. Never {@code null}.
     */
    public String getField() {
        return field;
    }

    /**
     * @return the names of the metrics used in the metric configuration. Never {@code null}.
     */
    public List<String> getMetrics() {
        return metrics;
    }

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        {
            builder.field(FIELD, field);
            builder.field(METRICS, metrics);
        }
        return builder.endObject();
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final MetricConfig that = (MetricConfig) other;
        return Objects.equals(field, that.field) && Objects.equals(metrics, that.metrics);
    }

    @Override
    public int hashCode() {
        return Objects.hash(field, metrics);
    }

    public static MetricConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}

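Mirroring the javadoc sample, the two metric entries could be built like this (field names are illustrative):

    MetricConfig fooMetrics = new MetricConfig("foo", Arrays.asList("min", "max", "sum"));
    MetricConfig barMetrics = new MetricConfig("bar", Collections.singletonList("max"));
    // a null or empty metrics list would be reported by validate()
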
@ -0,0 +1,242 @@
|
||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.client.rollup.job.config;
|
||||||
|
|
||||||
|
import org.elasticsearch.client.Validatable;
|
||||||
|
import org.elasticsearch.client.ValidationException;
|
||||||
|
import org.elasticsearch.common.Nullable;
|
||||||
|
import org.elasticsearch.common.ParseField;
|
||||||
|
import org.elasticsearch.common.regex.Regex;
|
||||||
|
import org.elasticsearch.common.unit.TimeValue;
|
||||||
|
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||||
|
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||||
|
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||||
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
|
import org.elasticsearch.common.xcontent.XContentParser;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.Collections;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Objects;
|
||||||
|
import java.util.Optional;
|
||||||
|
|
||||||
|
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||||
|
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This class holds the configuration details of a rollup job, such as the groupings, metrics, what
|
||||||
|
* index to rollup and where to roll them to.
|
||||||
|
*/
|
||||||
|
public class RollupJobConfig implements Validatable, ToXContentObject {
|
||||||
|
|
||||||
|
private static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(20);
|
||||||
|
private static final String ID = "id";
|
||||||
|
private static final String TIMEOUT = "timeout";
|
||||||
|
private static final String CRON = "cron";
|
||||||
|
private static final String PAGE_SIZE = "page_size";
|
||||||
|
private static final String INDEX_PATTERN = "index_pattern";
|
||||||
|
private static final String ROLLUP_INDEX = "rollup_index";
|
||||||
|
|
||||||
|
private final String id;
|
||||||
|
private final String indexPattern;
|
||||||
|
private final String rollupIndex;
|
||||||
|
private final GroupConfig groupConfig;
|
||||||
|
private final List<MetricConfig> metricsConfig;
|
||||||
|
private final TimeValue timeout;
|
||||||
|
private final String cron;
|
||||||
|
private final int pageSize;
|
||||||
|
|
||||||
|
private static final ConstructingObjectParser<RollupJobConfig, String> PARSER;
|
||||||
|
static {
|
||||||
|
PARSER = new ConstructingObjectParser<>("rollup_job_config", true, (args, optionalId) -> {
|
||||||
|
String id = args[0] != null ? (String) args[0] : optionalId;
|
||||||
|
String indexPattern = (String) args[1];
|
||||||
|
String rollupIndex = (String) args[2];
|
||||||
|
GroupConfig groupConfig = (GroupConfig) args[3];
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
List<MetricConfig> metricsConfig = (List<MetricConfig>) args[4];
|
||||||
|
TimeValue timeout = (TimeValue) args[5];
|
||||||
|
String cron = (String) args[6];
|
||||||
|
int pageSize = (int) args[7];
|
||||||
|
return new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groupConfig, metricsConfig, timeout);
|
||||||
|
});
|
||||||
|
PARSER.declareString(optionalConstructorArg(), new ParseField(ID));
|
||||||
|
PARSER.declareString(constructorArg(), new ParseField(INDEX_PATTERN));
|
||||||
|
PARSER.declareString(constructorArg(), new ParseField(ROLLUP_INDEX));
|
||||||
|
PARSER.declareObject(optionalConstructorArg(), (p, c) -> GroupConfig.fromXContent(p), new ParseField(GroupConfig.NAME));
|
||||||
|
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> MetricConfig.fromXContent(p), new ParseField(MetricConfig.NAME));
|
||||||
|
PARSER.declareField(optionalConstructorArg(), (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), TIMEOUT),
|
||||||
|
new ParseField(TIMEOUT), ObjectParser.ValueType.STRING_OR_NULL);
|
||||||
|
PARSER.declareString(constructorArg(), new ParseField(CRON));
|
||||||
|
PARSER.declareInt(constructorArg(), new ParseField(PAGE_SIZE));
|
||||||
|
}
|
||||||
|
|
||||||
|
public RollupJobConfig(final String id,
|
||||||
|
final String indexPattern,
|
||||||
|
final String rollupIndex,
|
||||||
|
final String cron,
|
||||||
|
final int pageSize,
|
||||||
|
final GroupConfig groupConfig,
|
||||||
|
final List<MetricConfig> metricsConfig,
|
||||||
|
final @Nullable TimeValue timeout) {
|
||||||
|
this.id = id;
|
||||||
|
this.indexPattern = indexPattern;
|
||||||
|
this.rollupIndex = rollupIndex;
|
||||||
|
this.groupConfig = groupConfig;
|
||||||
|
this.metricsConfig = metricsConfig != null ? metricsConfig : Collections.emptyList();
|
||||||
|
this.timeout = timeout != null ? timeout : DEFAULT_TIMEOUT;
|
||||||
|
this.cron = cron;
|
||||||
|
this.pageSize = pageSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Optional<ValidationException> validate() {
|
||||||
|
final ValidationException validationException = new ValidationException();
|
||||||
|
if (id == null || id.isEmpty()) {
|
||||||
|
validationException.addValidationError("Id must be a non-null, non-empty string");
|
||||||
|
}
|
||||||
|
if (indexPattern == null || indexPattern.isEmpty()) {
|
||||||
|
validationException.addValidationError("Index pattern must be a non-null, non-empty string");
|
||||||
|
} else if (Regex.isMatchAllPattern(indexPattern)) {
|
||||||
|
validationException.addValidationError("Index pattern must not match all indices (as it would match it's own rollup index");
|
||||||
|
        } else if (indexPattern != null && indexPattern.equals(rollupIndex)) {
            validationException.addValidationError("Rollup index may not be the same as the index pattern");
        } else if (Regex.isSimpleMatchPattern(indexPattern) && Regex.simpleMatch(indexPattern, rollupIndex)) {
            validationException.addValidationError("Index pattern would match rollup index name which is not allowed");
        }

        if (rollupIndex == null || rollupIndex.isEmpty()) {
            validationException.addValidationError("Rollup index must be a non-null, non-empty string");
        }
        if (cron == null || cron.isEmpty()) {
            validationException.addValidationError("Cron schedule must be a non-null, non-empty string");
        }
        if (pageSize <= 0) {
            validationException.addValidationError("Page size is mandatory and must be a positive long");
        }
        if (groupConfig == null && (metricsConfig == null || metricsConfig.isEmpty())) {
            validationException.addValidationError("At least one grouping or metric must be configured");
        }
        if (groupConfig != null) {
            final Optional<ValidationException> groupValidationErrors = groupConfig.validate();
            if (groupValidationErrors != null && groupValidationErrors.isPresent()) {
                validationException.addValidationErrors(groupValidationErrors.get());
            }
        }
        if (metricsConfig != null) {
            for (MetricConfig metricConfig : metricsConfig) {
                final Optional<ValidationException> metricsValidationErrors = metricConfig.validate();
                if (metricsValidationErrors != null && metricsValidationErrors.isPresent()) {
                    validationException.addValidationErrors(metricsValidationErrors.get());
                }
            }
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    public String getId() {
        return id;
    }

    public GroupConfig getGroupConfig() {
        return groupConfig;
    }

    public List<MetricConfig> getMetricsConfig() {
        return metricsConfig;
    }

    public TimeValue getTimeout() {
        return timeout;
    }

    public String getIndexPattern() {
        return indexPattern;
    }

    public String getRollupIndex() {
        return rollupIndex;
    }

    public String getCron() {
        return cron;
    }

    public int getPageSize() {
        return pageSize;
    }

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        {
            builder.field(ID, id);
            builder.field(INDEX_PATTERN, indexPattern);
            builder.field(ROLLUP_INDEX, rollupIndex);
            builder.field(CRON, cron);
            if (groupConfig != null) {
                builder.field(GroupConfig.NAME, groupConfig);
            }
            if (metricsConfig != null) {
                builder.startArray(MetricConfig.NAME);
                for (MetricConfig metric : metricsConfig) {
                    metric.toXContent(builder, params);
                }
                builder.endArray();
            }
            if (timeout != null) {
                builder.field(TIMEOUT, timeout.getStringRep());
            }
            builder.field(PAGE_SIZE, pageSize);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final RollupJobConfig that = (RollupJobConfig) other;
        return Objects.equals(this.id, that.id)
            && Objects.equals(this.indexPattern, that.indexPattern)
            && Objects.equals(this.rollupIndex, that.rollupIndex)
            && Objects.equals(this.cron, that.cron)
            && Objects.equals(this.groupConfig, that.groupConfig)
            && Objects.equals(this.metricsConfig, that.metricsConfig)
            && Objects.equals(this.timeout, that.timeout)
            && Objects.equals(this.pageSize, that.pageSize);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, indexPattern, rollupIndex, cron, groupConfig, metricsConfig, timeout, pageSize);
    }

    public static RollupJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) throws IOException {
        return PARSER.parse(parser, optionalJobId);
    }
}
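
For orientation, a minimal usage sketch of the config class above (purely illustrative names; it leans only on the GroupConfig, DateHistogramGroupConfig and MetricConfig constructors exercised by RollupIT further down):

// Hypothetical sketch: build a client-side rollup job config and validate it.
GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("timestamp", DateHistogramInterval.DAY));
List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("temperature", Arrays.asList("max", "min", "avg")));
RollupJobConfig config = new RollupJobConfig("sensor-job", "sensor-*", "sensor_rollup", // pattern must not match the rollup index
        "*/30 * * * * ?", 1000, groups, metrics, null); // a null timeout falls back to DEFAULT_TIMEOUT
assert config.validate().isPresent() == false; // no validation errors

An index pattern of "*", or one that matches the rollup index, would instead be reported through validate(), per the checks above.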
@ -0,0 +1,115 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * The configuration object for the terms groups in the rollup config.
 *
 * {
 *     "groups": {
 *         "terms": {
 *             "fields" : [ "foo", "bar" ]
 *         }
 *     }
 * }
 */
public class TermsGroupConfig implements Validatable, ToXContentObject {

    static final String NAME = "terms";
    private static final String FIELDS = "fields";

    private static final ConstructingObjectParser<TermsGroupConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
            @SuppressWarnings("unchecked") List<String> fields = (List<String>) args[0];
            return new TermsGroupConfig(fields != null ? fields.toArray(new String[fields.size()]) : null);
        });
        PARSER.declareStringArray(constructorArg(), new ParseField(FIELDS));
    }

    private final String[] fields;

    public TermsGroupConfig(final String... fields) {
        this.fields = fields;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (fields == null || fields.length == 0) {
            validationException.addValidationError("Fields must have at least one value");
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    /**
     * @return the names of the fields. Never {@code null}.
     */
    public String[] getFields() {
        return fields;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            builder.field(FIELDS, fields);
        }
        return builder.endObject();
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final TermsGroupConfig that = (TermsGroupConfig) other;
        return Arrays.equals(fields, that.fields);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(fields);
    }

    public static TermsGroupConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
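
A quick hypothetical sketch of the class in use (field names invented for illustration):

TermsGroupConfig terms = new TermsGroupConfig("host", "datacenter"); // bucket rollup documents by these fields
assert terms.validate().isPresent() == false;
assert new TermsGroupConfig().validate().isPresent(); // fails: "Fields must have at least one value"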
@ -31,6 +31,7 @@ import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobStatsRequest;
@ -227,6 +228,23 @@ public class MLRequestConvertersTests extends ESTestCase {
        }
    }

    public void testGetDatafeed() {
        GetDatafeedRequest getDatafeedRequest = new GetDatafeedRequest();

        Request request = MLRequestConverters.getDatafeed(getDatafeedRequest);

        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/ml/datafeeds", request.getEndpoint());
        assertFalse(request.getParameters().containsKey("allow_no_datafeeds"));

        getDatafeedRequest = new GetDatafeedRequest("feed-1", "feed-*");
        getDatafeedRequest.setAllowNoDatafeeds(true);
        request = MLRequestConverters.getDatafeed(getDatafeedRequest);

        assertEquals("/_xpack/ml/datafeeds/feed-1,feed-*", request.getEndpoint());
        assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_datafeeds"));
    }

    public void testDeleteDatafeed() {
        String datafeedId = randomAlphaOfLength(10);
        DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
@ -32,6 +32,8 @@ import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;

@ -58,6 +60,7 @@ import org.elasticsearch.client.ml.job.config.JobState;
import org.elasticsearch.client.ml.job.config.JobUpdate;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestStatus;
import org.junit.After;

import java.io.IOException;
@ -316,6 +319,84 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
        assertThat(createdDatafeed.getIndices(), equalTo(datafeedConfig.getIndices()));
    }

    public void testGetDatafeed() throws Exception {
        String jobId1 = "test-get-datafeed-job-1";
        String jobId2 = "test-get-datafeed-job-2";
        Job job1 = buildJob(jobId1);
        Job job2 = buildJob(jobId2);
        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
        machineLearningClient.putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
        machineLearningClient.putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);

        String datafeedId1 = jobId1 + "-feed";
        String datafeedId2 = jobId2 + "-feed";
        DatafeedConfig datafeed1 = DatafeedConfig.builder(datafeedId1, jobId1).setIndices("data_1").build();
        DatafeedConfig datafeed2 = DatafeedConfig.builder(datafeedId2, jobId2).setIndices("data_2").build();
        machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed1), RequestOptions.DEFAULT);
        machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed2), RequestOptions.DEFAULT);

        // Test getting specific datafeeds
        {
            GetDatafeedRequest request = new GetDatafeedRequest(datafeedId1, datafeedId2);
            GetDatafeedResponse response = execute(request, machineLearningClient::getDatafeed, machineLearningClient::getDatafeedAsync);

            assertEquals(2, response.count());
            assertThat(response.datafeeds(), hasSize(2));
            assertThat(response.datafeeds().stream().map(DatafeedConfig::getId).collect(Collectors.toList()),
                containsInAnyOrder(datafeedId1, datafeedId2));
        }

        // Test getting a single one
        {
            GetDatafeedRequest request = new GetDatafeedRequest(datafeedId1);
            GetDatafeedResponse response = execute(request, machineLearningClient::getDatafeed, machineLearningClient::getDatafeedAsync);

            assertTrue(response.count() == 1L);
            assertThat(response.datafeeds().get(0).getId(), equalTo(datafeedId1));
        }

        // Test getting all datafeeds explicitly
        {
            GetDatafeedRequest request = GetDatafeedRequest.getAllDatafeedsRequest();
            GetDatafeedResponse response = execute(request, machineLearningClient::getDatafeed, machineLearningClient::getDatafeedAsync);

            assertTrue(response.count() == 2L);
            assertTrue(response.datafeeds().size() == 2L);
            assertThat(response.datafeeds().stream().map(DatafeedConfig::getId).collect(Collectors.toList()),
                hasItems(datafeedId1, datafeedId2));
        }

        // Test getting all datafeeds implicitly
        {
            GetDatafeedResponse response = execute(new GetDatafeedRequest(), machineLearningClient::getDatafeed,
                machineLearningClient::getDatafeedAsync);

            assertTrue(response.count() >= 2L);
            assertTrue(response.datafeeds().size() >= 2L);
            assertThat(response.datafeeds().stream().map(DatafeedConfig::getId).collect(Collectors.toList()),
                hasItems(datafeedId1, datafeedId2));
        }

        // Test get missing pattern with allow_no_datafeeds set to true
        {
            GetDatafeedRequest request = new GetDatafeedRequest("missing-*");

            GetDatafeedResponse response = execute(request, machineLearningClient::getDatafeed, machineLearningClient::getDatafeedAsync);

            assertThat(response.count(), equalTo(0L));
        }

        // Test get missing pattern with allow_no_datafeeds set to false
        {
            GetDatafeedRequest request = new GetDatafeedRequest("missing-*");
            request.setAllowNoDatafeeds(false);

            ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
                () -> execute(request, machineLearningClient::getDatafeed, machineLearningClient::getDatafeedAsync));
            assertThat(e.status(), equalTo(RestStatus.NOT_FOUND));
        }
    }

    public void testDeleteDatafeed() throws Exception {
        String jobId = randomValidJobId();
        Job job = buildJob(jobId);
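
Condensed, the wildcard behaviour those last two blocks pin down (a sketch, not code from this change):

GetDatafeedRequest lenient = new GetDatafeedRequest("missing-*"); // allow_no_datafeeds defaults to true,
// so a non-matching expression yields an empty response (count == 0) rather than an error

GetDatafeedRequest strict = new GetDatafeedRequest("missing-*");
strict.setAllowNoDatafeeds(false); // the same expression now fails with ElasticsearchStatusException (NOT_FOUND)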
@ -768,6 +768,7 @@ public class RestHighLevelClientTests extends ESTestCase {
            if (apiName.startsWith("xpack.") == false &&
                apiName.startsWith("license.") == false &&
                apiName.startsWith("machine_learning.") == false &&
                apiName.startsWith("rollup.") == false &&
                apiName.startsWith("watcher.") == false &&
                apiName.startsWith("graph.") == false &&
                apiName.startsWith("migration.") == false &&
@ -0,0 +1,162 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client;

import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
import org.elasticsearch.client.rollup.job.config.GroupConfig;
import org.elasticsearch.client.rollup.job.config.MetricConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

public class RollupIT extends ESRestHighLevelClientTestCase {

    private static final List<String> SUPPORTED_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
        SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);

    @SuppressWarnings("unchecked")
    public void testPutRollupJob() throws Exception {
        double sum = 0.0d;
        int max = Integer.MIN_VALUE;
        int min = Integer.MAX_VALUE;

        final BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        for (int minute = 0; minute < 60; minute++) {
            for (int second = 0; second < 60; second = second + 10) {
                final int value = randomIntBetween(0, 100);

                final IndexRequest indexRequest = new IndexRequest("docs", "doc");
                indexRequest.source(jsonBuilder()
                    .startObject()
                    .field("value", value)
                    .field("date", String.format(Locale.ROOT, "2018-01-01T00:%02d:%02dZ", minute, second))
                    .endObject());
                bulkRequest.add(indexRequest);

                sum += value;
                if (value > max) {
                    max = value;
                }
                if (value < min) {
                    min = value;
                }
            }
        }

        final int numDocs = bulkRequest.numberOfActions();

        BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
        assertEquals(RestStatus.OK, bulkResponse.status());
        if (bulkResponse.hasFailures()) {
            for (BulkItemResponse itemResponse : bulkResponse.getItems()) {
                if (itemResponse.isFailed()) {
                    logger.fatal(itemResponse.getFailureMessage());
                }
            }
        }
        assertFalse(bulkResponse.hasFailures());

        RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest("docs"), RequestOptions.DEFAULT);
        assertEquals(0, refreshResponse.getFailedShards());

        final String id = randomAlphaOfLength(10);
        final String indexPattern = randomFrom("docs", "d*", "doc*");
        final String rollupIndex = randomFrom("rollup", "test");
        final String cron = "*/1 * * * * ?";
        final int pageSize = randomIntBetween(numDocs, numDocs * 10);
        // TODO expand this to also test with histogram and terms?
        final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY));
        final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
        final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));

        PutRollupJobRequest putRollupJobRequest =
            new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));

        final RollupClient rollupClient = highLevelClient().rollup();
        PutRollupJobResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
        assertTrue(response.isAcknowledged());

        // TODO Replace this with the Rollup Start Job API
        Response startResponse = client().performRequest(new Request("POST", "/_xpack/rollup/job/" + id + "/_start"));
        assertEquals(RestStatus.OK.getStatus(), startResponse.getHttpResponse().getStatusLine().getStatusCode());

        int finalMin = min;
        int finalMax = max;
        double finalSum = sum;
        assertBusy(() -> {
            SearchResponse searchResponse = highLevelClient().search(new SearchRequest(rollupIndex), RequestOptions.DEFAULT);
            assertEquals(0, searchResponse.getFailedShards());
            // all 360 source docs (60 minutes x 6 ten-second slots) fall on 2018-01-01, so they collapse into one daily bucket
            assertEquals(1L, searchResponse.getHits().getTotalHits());

            SearchHit searchHit = searchResponse.getHits().getAt(0);
            Map<String, Object> source = searchHit.getSourceAsMap();
            assertNotNull(source);

            assertEquals(numDocs, source.get("date.date_histogram._count"));
            assertEquals(groups.getDateHistogram().getInterval().toString(), source.get("date.date_histogram.interval"));
            assertEquals(groups.getDateHistogram().getTimeZone(), source.get("date.date_histogram.time_zone"));

            for (MetricConfig metric : metrics) {
                for (String name : metric.getMetrics()) {
                    Number value = (Number) source.get(metric.getField() + "." + name + ".value");
                    if ("min".equals(name)) {
                        assertEquals(finalMin, value.intValue());
                    } else if ("max".equals(name)) {
                        assertEquals(finalMax, value.intValue());
                    } else if ("sum".equals(name)) {
                        assertEquals(finalSum, value.doubleValue(), 0.0d);
                    } else if ("avg".equals(name)) {
                        // avg is rolled up as a running sum plus a separate _count, not a precomputed average
                        assertEquals(finalSum, value.doubleValue(), 0.0d);
                        Number avgCount = (Number) source.get(metric.getField() + "." + name + "._count");
                        assertEquals(numDocs, avgCount.intValue());
                    } else if ("value_count".equals(name)) {
                        assertEquals(numDocs, value.intValue());
                    }
                }
            }
        });
    }
}
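
Pieced together from the assertions above, the single rollup document this job produces looks roughly as follows (a sketch; the time zone value is an assumption, not taken from this change):

// date.date_histogram._count    -> 360
// date.date_histogram.interval  -> "1d"
// date.date_histogram.time_zone -> "UTC" (assumed default)
// value.min.value / value.max.value / value.sum.value -> the per-bucket aggregates
// value.avg.value + value.avg._count -> components used to derive the average
// value.value_count.value       -> 360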
@ -45,6 +45,8 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetCategoriesResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetInfluencersResponse;
import org.elasticsearch.client.ml.GetJobRequest;
@ -208,14 +210,14 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {

        {
            //tag::x-pack-ml-get-job-request
            GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*"); // <1>
            request.setAllowNoJobs(true); // <2>
            //end::x-pack-ml-get-job-request

            //tag::x-pack-ml-get-job-execute
            GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT);
            long numberOfJobs = response.count(); // <1>
            List<Job> jobs = response.jobs(); // <2>
            //end::x-pack-ml-get-job-execute

            assertEquals(2, response.count());

@ -266,12 +268,12 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        {
            //tag::x-pack-delete-ml-job-request
            DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job");
            deleteJobRequest.setForce(false); // <1>
            AcknowledgedResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
            //end::x-pack-delete-ml-job-request

            //tag::x-pack-delete-ml-job-response
            boolean isAcknowledged = deleteJobResponse.isAcknowledged(); // <1>
            //end::x-pack-delete-ml-job-response
        }
        {

@ -313,13 +315,13 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {

        {
            //tag::x-pack-ml-open-job-request
            OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-first-machine-learning-job"); // <1>
            openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <2>
            //end::x-pack-ml-open-job-request

            //tag::x-pack-ml-open-job-execute
            OpenJobResponse openJobResponse = client.machineLearning().openJob(openJobRequest, RequestOptions.DEFAULT);
            boolean isOpened = openJobResponse.isOpened(); // <1>
            //end::x-pack-ml-open-job-execute

        }

@ -328,7 +330,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        ActionListener<OpenJobResponse> listener = new ActionListener<OpenJobResponse>() {
            @Override
            public void onResponse(OpenJobResponse openJobResponse) {
                // <1>
            }

            @Override

@ -343,7 +345,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        listener = new LatchedActionListener<>(listener, latch);

        // tag::x-pack-ml-open-job-execute-async
        client.machineLearning().openJobAsync(openJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::x-pack-ml-open-job-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));

@ -359,15 +361,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);

        //tag::x-pack-ml-close-job-request
        CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-first-machine-learning-job", "otherjobs*"); // <1>
        closeJobRequest.setForce(false); // <2>
        closeJobRequest.setAllowNoJobs(true); // <3>
        closeJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <4>
        //end::x-pack-ml-close-job-request

        //tag::x-pack-ml-close-job-execute
        CloseJobResponse closeJobResponse = client.machineLearning().closeJob(closeJobRequest, RequestOptions.DEFAULT);
        boolean isClosed = closeJobResponse.isClosed(); // <1>
        //end::x-pack-ml-close-job-execute

        }

@ -380,7 +382,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        ActionListener<CloseJobResponse> listener = new ActionListener<CloseJobResponse>() {
            @Override
            public void onResponse(CloseJobResponse closeJobResponse) {
                // <1>
            }

            @Override

@ -396,7 +398,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        listener = new LatchedActionListener<>(listener, latch);

        // tag::x-pack-ml-close-job-execute-async
        client.machineLearning().closeJobAsync(closeJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::x-pack-ml-close-job-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));

@ -427,37 +429,37 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        customSettings.put("custom-setting-1", "custom-value");

        //tag::x-pack-ml-update-job-detector-options
        JobUpdate.DetectorUpdate detectorUpdate = new JobUpdate.DetectorUpdate(0, // <1>
            "detector description", // <2>
            detectionRules); // <3>
        //end::x-pack-ml-update-job-detector-options

        //tag::x-pack-ml-update-job-options
        JobUpdate update = new JobUpdate.Builder(jobId) // <1>
            .setDescription("My description") // <2>
            .setAnalysisLimits(new AnalysisLimits(1000L, null)) // <3>
            .setBackgroundPersistInterval(TimeValue.timeValueHours(3)) // <4>
            .setCategorizationFilters(Arrays.asList("categorization-filter")) // <5>
            .setDetectorUpdates(Arrays.asList(detectorUpdate)) // <6>
            .setGroups(Arrays.asList("job-group-1")) // <7>
            .setResultsRetentionDays(10L) // <8>
            .setModelPlotConfig(new ModelPlotConfig(true, null)) // <9>
            .setModelSnapshotRetentionDays(7L) // <10>
            .setCustomSettings(customSettings) // <11>
            .setRenormalizationWindowDays(3L) // <12>
            .build();
        //end::x-pack-ml-update-job-options


        //tag::x-pack-ml-update-job-request
        UpdateJobRequest updateJobRequest = new UpdateJobRequest(update); // <1>
        //end::x-pack-ml-update-job-request

        //tag::x-pack-ml-update-job-execute
        PutJobResponse updateJobResponse = client.machineLearning().updateJob(updateJobRequest, RequestOptions.DEFAULT);
        //end::x-pack-ml-update-job-execute
        //tag::x-pack-ml-update-job-response
        Job updatedJob = updateJobResponse.getResponse(); // <1>
        //end::x-pack-ml-update-job-response

        assertEquals(update.getDescription(), updatedJob.getDescription());

@ -467,7 +469,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
            @Override
            public void onResponse(PutJobResponse updateJobResponse) {
                // <1>
            }

            @Override

@ -483,7 +485,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        listener = new LatchedActionListener<>(listener, latch);

        // tag::x-pack-ml-update-job-execute-async
        client.machineLearning().updateJobAsync(updateJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::x-pack-ml-update-job-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
@ -590,6 +592,59 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        }
    }

    public void testGetDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();

        Job job = MachineLearningIT.buildJob("get-datafeed-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = job.getId() + "-feed";
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);

        {
            //tag::x-pack-ml-get-datafeed-request
            GetDatafeedRequest request = new GetDatafeedRequest(datafeedId); // <1>
            request.setAllowNoDatafeeds(true); // <2>
            //end::x-pack-ml-get-datafeed-request

            //tag::x-pack-ml-get-datafeed-execute
            GetDatafeedResponse response = client.machineLearning().getDatafeed(request, RequestOptions.DEFAULT);
            long numberOfDatafeeds = response.count(); // <1>
            List<DatafeedConfig> datafeeds = response.datafeeds(); // <2>
            //end::x-pack-ml-get-datafeed-execute

            assertEquals(1, numberOfDatafeeds);
            assertEquals(1, datafeeds.size());
        }
        {
            GetDatafeedRequest request = new GetDatafeedRequest(datafeedId);

            // tag::x-pack-ml-get-datafeed-listener
            ActionListener<GetDatafeedResponse> listener = new ActionListener<GetDatafeedResponse>() {
                @Override
                public void onResponse(GetDatafeedResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::x-pack-ml-get-datafeed-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::x-pack-ml-get-datafeed-execute-async
            client.machineLearning().getDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::x-pack-ml-get-datafeed-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    public void testDeleteDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
@ -604,13 +659,13 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        {
            //tag::x-pack-delete-ml-datafeed-request
            DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
            deleteDatafeedRequest.setForce(false); // <1>
            AcknowledgedResponse deleteDatafeedResponse = client.machineLearning().deleteDatafeed(
                deleteDatafeedRequest, RequestOptions.DEFAULT);
            //end::x-pack-delete-ml-datafeed-request

            //tag::x-pack-delete-ml-datafeed-response
            boolean isAcknowledged = deleteDatafeedResponse.isAcknowledged(); // <1>
            //end::x-pack-delete-ml-datafeed-response
        }
@ -759,15 +814,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {

        {
            //tag::x-pack-ml-flush-job-request
            FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-first-machine-learning-job"); // <1>
            //end::x-pack-ml-flush-job-request

            //tag::x-pack-ml-flush-job-request-options
            flushJobRequest.setCalcInterim(true); // <1>
            flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); // <2>
            flushJobRequest.setStart("2018-08-31T16:35:17+00:00"); // <3>
            flushJobRequest.setEnd("2018-08-31T16:35:27+00:00"); // <4>
            flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00"); // <5>
            //end::x-pack-ml-flush-job-request-options

            //tag::x-pack-ml-flush-job-execute

@ -775,8 +830,8 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
            //end::x-pack-ml-flush-job-execute

            //tag::x-pack-ml-flush-job-response
            boolean isFlushed = flushJobResponse.isFlushed(); // <1>
            Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); // <2>
            //end::x-pack-ml-flush-job-response

        }

@ -785,7 +840,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() {
            @Override
            public void onResponse(FlushJobResponse flushJobResponse) {
                // <1>
            }

            @Override

@ -801,7 +856,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        listener = new LatchedActionListener<>(listener, latch);

        // tag::x-pack-ml-flush-job-execute-async
        client.machineLearning().flushJobAsync(flushJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::x-pack-ml-flush-job-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
@ -839,13 +894,13 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {

        {
            //tag::x-pack-ml-delete-forecast-request
            DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest("deleting-forecast-for-job"); // <1>
            //end::x-pack-ml-delete-forecast-request

            //tag::x-pack-ml-delete-forecast-request-options
            deleteForecastRequest.setForecastIds(forecastId); // <1>
            deleteForecastRequest.timeout("30s"); // <2>
            deleteForecastRequest.setAllowNoForecasts(true); // <3>
            //end::x-pack-ml-delete-forecast-request-options

            //tag::x-pack-ml-delete-forecast-execute

@ -854,7 +909,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
            //end::x-pack-ml-delete-forecast-execute

            //tag::x-pack-ml-delete-forecast-response
            boolean isAcknowledged = deleteForecastResponse.isAcknowledged(); // <1>
            //end::x-pack-ml-delete-forecast-response
        }
        {

@ -862,7 +917,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
            @Override
            public void onResponse(AcknowledgedResponse deleteForecastResponse) {
                // <1>
            }

            @Override

@ -879,7 +934,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        listener = new LatchedActionListener<>(listener, latch);

        // tag::x-pack-ml-delete-forecast-execute-async
        client.machineLearning().deleteForecastAsync(deleteForecastRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::x-pack-ml-delete-forecast-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
@ -897,8 +952,8 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {

        {
            //tag::x-pack-ml-get-job-stats-request
            GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); // <1>
            request.setAllowNoJobs(true); // <2>
            //end::x-pack-ml-get-job-stats-request

            //tag::x-pack-ml-get-job-stats-execute

@ -906,8 +961,8 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
            //end::x-pack-ml-get-job-stats-execute

            //tag::x-pack-ml-get-job-stats-response
            long numberOfJobStats = response.count(); // <1>
            List<JobStats> jobStats = response.jobStats(); // <2>
            //end::x-pack-ml-get-job-stats-response

            assertEquals(2, response.count());
@ -964,12 +1019,12 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {

        {
            //tag::x-pack-ml-forecast-job-request
            ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); // <1>
            //end::x-pack-ml-forecast-job-request

            //tag::x-pack-ml-forecast-job-request-options
            forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48)); // <1>
            forecastJobRequest.setDuration(TimeValue.timeValueHours(24)); // <2>
            //end::x-pack-ml-forecast-job-request-options

            //tag::x-pack-ml-forecast-job-execute

@ -977,8 +1032,8 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
            //end::x-pack-ml-forecast-job-execute

            //tag::x-pack-ml-forecast-job-response
            boolean isAcknowledged = forecastJobResponse.isAcknowledged(); // <1>
            String forecastId = forecastJobResponse.getForecastId(); // <2>
            //end::x-pack-ml-forecast-job-response
            assertTrue(isAcknowledged);
            assertNotNull(forecastId);

@ -988,7 +1043,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        ActionListener<ForecastJobResponse> listener = new ActionListener<ForecastJobResponse>() {
            @Override
            public void onResponse(ForecastJobResponse forecastJobResponse) {
                // <1>
            }

            @Override

@ -1004,7 +1059,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        listener = new LatchedActionListener<>(listener, latch);

        // tag::x-pack-ml-forecast-job-execute-async
        client.machineLearning().forecastJobAsync(forecastJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::x-pack-ml-forecast-job-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
@@ -1211,18 +1266,18 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {

        {
            //tag::x-pack-ml-post-data-request
-           PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); //<1>
+           PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); // <1>
            Map<String, Object> mapData = new HashMap<>();
            mapData.put("total", 109);
-           jsonBuilder.addDoc(mapData); //<2>
+           jsonBuilder.addDoc(mapData); // <2>
-           jsonBuilder.addDoc("{\"total\":1000}"); //<3>
+           jsonBuilder.addDoc("{\"total\":1000}"); // <3>
-           PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); //<4>
+           PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <4>
            //end::x-pack-ml-post-data-request


            //tag::x-pack-ml-post-data-request-options
-           postDataRequest.setResetStart("2018-08-31T16:35:07+00:00"); //<1>
+           postDataRequest.setResetStart("2018-08-31T16:35:07+00:00"); // <1>
-           postDataRequest.setResetEnd("2018-08-31T16:35:17+00:00"); //<2>
+           postDataRequest.setResetEnd("2018-08-31T16:35:17+00:00"); // <2>
            //end::x-pack-ml-post-data-request-options
            postDataRequest.setResetEnd(null);
            postDataRequest.setResetStart(null);
@@ -1232,7 +1287,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
            //end::x-pack-ml-post-data-execute

            //tag::x-pack-ml-post-data-response
-           DataCounts dataCounts = postDataResponse.getDataCounts(); //<1>
+           DataCounts dataCounts = postDataResponse.getDataCounts(); // <1>
            //end::x-pack-ml-post-data-response
            assertEquals(2, dataCounts.getInputRecordCount());

@@ -1242,7 +1297,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        ActionListener<PostDataResponse> listener = new ActionListener<PostDataResponse>() {
            @Override
            public void onResponse(PostDataResponse postDataResponse) {
-               //<1>
+               // <1>
            }

            @Override
@@ -1255,14 +1310,14 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
        Map<String, Object> mapData = new HashMap<>();
        mapData.put("total", 109);
        jsonBuilder.addDoc(mapData);
-       PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); //<1>
+       PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <1>

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::x-pack-ml-post-data-execute-async
-       client.machineLearning().postDataAsync(postDataRequest, RequestOptions.DEFAULT, listener); //<1>
+       client.machineLearning().postDataAsync(postDataRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::x-pack-ml-post-data-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
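Worth noting about the PostDataRequest.JsonBuilder snippets above: the builder accepts both structured maps and raw JSON strings, and each addDoc call contributes one input record, which is exactly what the assertEquals(2, dataCounts.getInputRecordCount()) check earlier pins down:

    jsonBuilder.addDoc(mapData);            // structured document from a Map<String, Object>
    jsonBuilder.addDoc("{\"total\":1000}"); // raw JSON document as a String
    // two addDoc calls -> an input record count of 2 in the DataCounts of the response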
@@ -0,0 +1,163 @@ (new file: RollupDocumentationIT.java)
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.documentation;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
import org.elasticsearch.client.rollup.job.config.GroupConfig;
import org.elasticsearch.client.rollup.job.config.HistogramGroupConfig;
import org.elasticsearch.client.rollup.job.config.MetricConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.client.rollup.job.config.TermsGroupConfig;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.junit.Before;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {

    @Before
    public void setUpDocs() throws IOException {
        final BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        for (int i = 0; i < 50; i++) {
            final IndexRequest indexRequest = new IndexRequest("docs", "doc");
            indexRequest.source(jsonBuilder()
                .startObject()
                .field("timestamp", String.format(Locale.ROOT, "2018-01-01T00:%02d:00Z", i))
                .field("hostname", 0)
                .field("datacenter", 0)
                .field("temperature", 0)
                .field("voltage", 0)
                .field("load", 0)
                .field("net_in", 0)
                .field("net_out", 0)
                .endObject());
            bulkRequest.add(indexRequest);
        }
        BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
        assertEquals(RestStatus.OK, bulkResponse.status());
        assertFalse(bulkResponse.hasFailures());

        RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest("docs"), RequestOptions.DEFAULT);
        assertEquals(0, refreshResponse.getFailedShards());
    }

    public void testCreateRollupJob() throws Exception {
        RestHighLevelClient client = highLevelClient();

        final String indexPattern = "docs";
        final String rollupIndex = "rollup";
        final String cron = "*/1 * * * * ?";
        final int pageSize = 100;
        final TimeValue timeout = null;

        //tag::x-pack-rollup-put-rollup-job-group-config
        DateHistogramGroupConfig dateHistogram =
            new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC"); // <1>
        TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter"); // <2>
        HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load", "net_in", "net_out"); // <3>

        GroupConfig groups = new GroupConfig(dateHistogram, histogram, terms); // <4>
        //end::x-pack-rollup-put-rollup-job-group-config

        //tag::x-pack-rollup-put-rollup-job-metrics-config
        List<MetricConfig> metrics = new ArrayList<>(); // <1>
        metrics.add(new MetricConfig("temperature", Arrays.asList("min", "max", "sum"))); // <2>
        metrics.add(new MetricConfig("voltage", Arrays.asList("avg", "value_count"))); // <3>
        //end::x-pack-rollup-put-rollup-job-metrics-config
        {
            String id = "job_1";

            //tag::x-pack-rollup-put-rollup-job-config
            RollupJobConfig config = new RollupJobConfig(id, // <1>
                indexPattern, // <2>
                rollupIndex, // <3>
                cron, // <4>
                pageSize, // <5>
                groups, // <6>
                metrics, // <7>
                timeout); // <8>
            //end::x-pack-rollup-put-rollup-job-config

            //tag::x-pack-rollup-put-rollup-job-request
            PutRollupJobRequest request = new PutRollupJobRequest(config); // <1>
            //end::x-pack-rollup-put-rollup-job-request

            //tag::x-pack-rollup-put-rollup-job-execute
            PutRollupJobResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
            //end::x-pack-rollup-put-rollup-job-execute

            //tag::x-pack-rollup-put-rollup-job-response
            boolean acknowledged = response.isAcknowledged(); // <1>
            //end::x-pack-rollup-put-rollup-job-response
            assertTrue(acknowledged);
        }
        {
            String id = "job_2";
            RollupJobConfig config = new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout);
            PutRollupJobRequest request = new PutRollupJobRequest(config);
            // tag::x-pack-rollup-put-rollup-job-execute-listener
            ActionListener<PutRollupJobResponse> listener = new ActionListener<PutRollupJobResponse>() {
                @Override
                public void onResponse(PutRollupJobResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::x-pack-rollup-put-rollup-job-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::x-pack-rollup-put-rollup-job-execute-async
            client.rollup().putRollupJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::x-pack-rollup-put-rollup-job-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
}
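Two of the constants above deserve a note: the cron expression appears to use Quartz-style syntax (the trailing ? is the day-of-week wildcard), so "*/1 * * * * ?" would trigger the job every second, and pageSize caps how many buckets each rollup iteration processes. The assembled config can also be checked client-side before the PUT; a minimal sketch, assuming the validate() API that the *ConfigTests classes later in this commit exercise:

    // config is the RollupJobConfig built above; ValidationException is
    // org.elasticsearch.client.ValidationException, Optional is java.util.Optional.
    Optional<ValidationException> invalid = config.validate();
    invalid.ifPresent(e -> {
        throw new IllegalArgumentException("invalid rollup job: " + e.validationErrors());
    });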
@@ -0,0 +1,70 @@ (new file: GetDatafeedRequestTests.java)
/* [Apache License 2.0 header, identical to the file above] */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class GetDatafeedRequestTests extends AbstractXContentTestCase<GetDatafeedRequest> {

    public void testAllDatafeedRequest() {
        GetDatafeedRequest request = GetDatafeedRequest.getAllDatafeedsRequest();

        assertEquals(request.getDatafeedIds().size(), 1);
        assertEquals(request.getDatafeedIds().get(0), "_all");
    }

    public void testNewWithDatafeedId() {
        Exception exception = expectThrows(NullPointerException.class, () -> new GetDatafeedRequest("feed", null));
        assertEquals(exception.getMessage(), "datafeedIds must not contain null values");
    }

    @Override
    protected GetDatafeedRequest createTestInstance() {
        int count = randomIntBetween(0, 10);
        List<String> datafeedIds = new ArrayList<>(count);

        for (int i = 0; i < count; i++) {
            datafeedIds.add(DatafeedConfigTests.randomValidDatafeedId());
        }

        GetDatafeedRequest request = new GetDatafeedRequest(datafeedIds);

        if (randomBoolean()) {
            request.setAllowNoDatafeeds(randomBoolean());
        }

        return request;
    }

    @Override
    protected GetDatafeedRequest doParseInstance(XContentParser parser) throws IOException {
        return GetDatafeedRequest.PARSER.parse(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }
}
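For reference, the request shapes this test exercises, grounded in its assertions (the "feed-1"/"feed-2" ids are illustrative placeholders):

    GetDatafeedRequest all = GetDatafeedRequest.getAllDatafeedsRequest(); // expands to the single id "_all"
    GetDatafeedRequest some = new GetDatafeedRequest("feed-1", "feed-2"); // ids must not contain null values
    some.setAllowNoDatafeeds(true); // tolerate an expression that matches nothing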
@@ -0,0 +1,58 @@ (new file: GetDatafeedResponseTests.java)
/* [Apache License 2.0 header, identical to the file above] */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

public class GetDatafeedResponseTests extends AbstractXContentTestCase<GetDatafeedResponse> {

    @Override
    protected GetDatafeedResponse createTestInstance() {
        int count = randomIntBetween(1, 5);
        List<DatafeedConfig.Builder> results = new ArrayList<>(count);
        for (int i = 0; i < count; i++) {
            results.add(DatafeedConfigTests.createRandomBuilder());
        }
        return new GetDatafeedResponse(results, count);
    }

    @Override
    protected GetDatafeedResponse doParseInstance(XContentParser parser) throws IOException {
        return GetDatafeedResponse.fromXContent(parser);
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        return field -> !field.isEmpty();
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }
}
@@ -64,6 +64,6 @@ public class GetJobRequestTests extends AbstractXContentTestCase<GetJobRequest>

    @Override
    protected boolean supportsUnknownFields() {
-       return true;
+       return false;
    }
}

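The true-to-false flip above is behavioural, not cosmetic: AbstractXContentTestCase only injects random unknown fields into the XContent round-trip test when supportsUnknownFields() returns true, so returning false is the appropriate setting when the request parser is strict and would reject fields it does not recognize.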
@@ -44,6 +44,10 @@ public class DatafeedConfigTests extends AbstractXContentTestCase<DatafeedConfig
    }

    public static DatafeedConfig createRandom() {
+       return createRandomBuilder().build();
+   }
+
+   public static DatafeedConfig.Builder createRandomBuilder() {
        long bucketSpanMillis = 3600000;
        DatafeedConfig.Builder builder = constructBuilder();
        builder.setIndices(randomStringList(1, 10));
@@ -99,7 +103,7 @@ public class DatafeedConfigTests extends AbstractXContentTestCase<DatafeedConfig
        if (randomBoolean()) {
            builder.setChunkingConfig(ChunkingConfigTests.createRandomizedChunk());
        }
-       return builder.build();
+       return builder;
    }

    public static List<String> randomStringList(int min, int max) {
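These two DatafeedConfigTests hunks form one refactor: createRandom() keeps its signature but now delegates to the new createRandomBuilder(), which is the old generator body minus the final build(). That is what lets GetDatafeedResponseTests above assemble its response from DatafeedConfig.Builder instances. In short:

    DatafeedConfig.Builder builder = DatafeedConfigTests.createRandomBuilder(); // new entry point, reusable
    DatafeedConfig config = DatafeedConfigTests.createRandom();                 // existing callers are unaffected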
@@ -0,0 +1,59 @@ (new file: PutRollupJobRequestTests.java)
/* [Apache License 2.0 header, identical to the file above] */
package org.elasticsearch.client.rollup;

import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;

import java.io.IOException;

public class PutRollupJobRequestTests extends AbstractXContentTestCase<PutRollupJobRequest> {

    private String jobId;

    @Before
    public void setUpOptionalId() {
        jobId = randomAlphaOfLengthBetween(1, 10);
    }

    @Override
    protected PutRollupJobRequest createTestInstance() {
        return new PutRollupJobRequest(RollupJobConfigTests.randomRollupJobConfig(jobId));
    }

    @Override
    protected PutRollupJobRequest doParseInstance(final XContentParser parser) throws IOException {
        final String optionalId = randomBoolean() ? jobId : null;
        return new PutRollupJobRequest(RollupJobConfig.fromXContent(parser, optionalId));
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testRequireConfiguration() {
        final NullPointerException e = expectThrows(NullPointerException.class, () -> new PutRollupJobRequest(null));
        assertEquals("rollup job configuration is required", e.getMessage());
    }
}
@@ -0,0 +1,50 @@ (new file: PutRollupJobResponseTests.java)
/* [Apache License 2.0 header, identical to the file above] */
package org.elasticsearch.client.rollup;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;

import java.io.IOException;

public class PutRollupJobResponseTests extends AbstractXContentTestCase<PutRollupJobResponse> {

    private boolean acknowledged;

    @Before
    public void setUpAcknowledged() {
        acknowledged = randomBoolean();
    }

    @Override
    protected PutRollupJobResponse createTestInstance() {
        return new PutRollupJobResponse(acknowledged);
    }

    @Override
    protected PutRollupJobResponse doParseInstance(XContentParser parser) throws IOException {
        return PutRollupJobResponse.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }
}
@@ -0,0 +1,98 @@ (new file: DateHistogramGroupConfigTests.java)
/* [Apache License 2.0 header, identical to the file above] */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class DateHistogramGroupConfigTests extends AbstractXContentTestCase<DateHistogramGroupConfig> {

    @Override
    protected DateHistogramGroupConfig createTestInstance() {
        return randomDateHistogramGroupConfig();
    }

    @Override
    protected DateHistogramGroupConfig doParseInstance(final XContentParser parser) throws IOException {
        return DateHistogramGroupConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullField() {
        final DateHistogramGroupConfig config = new DateHistogramGroupConfig(null, DateHistogramInterval.DAY, null, null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Field name is required")));
    }

    public void testValidateEmptyField() {
        final DateHistogramGroupConfig config = new DateHistogramGroupConfig("", DateHistogramInterval.DAY, null, null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Field name is required")));
    }

    public void testValidateNullInterval() {
        final DateHistogramGroupConfig config = new DateHistogramGroupConfig("field", null, null, null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Interval is required")));
    }

    public void testValidate() {
        final DateHistogramGroupConfig config = randomDateHistogramGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static DateHistogramGroupConfig randomDateHistogramGroupConfig() {
        final String field = randomAlphaOfLength(randomIntBetween(3, 10));
        final DateHistogramInterval interval = new DateHistogramInterval(randomPositiveTimeValue());
        final DateHistogramInterval delay = randomBoolean() ? new DateHistogramInterval(randomPositiveTimeValue()) : null;
        final String timezone = randomBoolean() ? randomDateTimeZone().toString() : null;
        return new DateHistogramGroupConfig(field, interval, delay, timezone);
    }
}
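Grounded in randomDateHistogramGroupConfig() above: only the field and the interval are mandatory, while the delay and timezone arguments may be left null, for example:

    // "timestamp" is an illustrative field name; the two optional
    // arguments (delay, timezone) are omitted here.
    DateHistogramGroupConfig hourly =
        new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR, null, null);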
@@ -0,0 +1,116 @@ (new file: GroupConfigTests.java)
/* [Apache License 2.0 header, identical to the file above] */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class GroupConfigTests extends AbstractXContentTestCase<GroupConfig> {

    @Override
    protected GroupConfig createTestInstance() {
        return randomGroupConfig();
    }

    @Override
    protected GroupConfig doParseInstance(final XContentParser parser) throws IOException {
        return GroupConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullDateHistogramGroupConfig() {
        final GroupConfig config = new GroupConfig(null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Date histogram must not be null")));
    }

    public void testValidateDateHistogramGroupConfigWithErrors() {
        final DateHistogramGroupConfig dateHistogramGroupConfig = new DateHistogramGroupConfig(null, null, null, null);

        final GroupConfig config = new GroupConfig(dateHistogramGroupConfig);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(2));
        assertThat(validationException.validationErrors(),
            containsInAnyOrder("Field name is required", "Interval is required"));
    }

    public void testValidateHistogramGroupConfigWithErrors() {
        final HistogramGroupConfig histogramGroupConfig = new HistogramGroupConfig(0L);

        final GroupConfig config = new GroupConfig(randomGroupConfig().getDateHistogram(), histogramGroupConfig, null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(2));
        assertThat(validationException.validationErrors(),
            containsInAnyOrder("Fields must have at least one value", "Interval must be a positive long"));
    }

    public void testValidateTermsGroupConfigWithErrors() {
        final TermsGroupConfig termsGroupConfig = new TermsGroupConfig();

        final GroupConfig config = new GroupConfig(randomGroupConfig().getDateHistogram(), null, termsGroupConfig);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Fields must have at least one value"));
    }

    public void testValidate() {
        final GroupConfig config = randomGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static GroupConfig randomGroupConfig() {
        DateHistogramGroupConfig dateHistogram = DateHistogramGroupConfigTests.randomDateHistogramGroupConfig();
        HistogramGroupConfig histogram = randomBoolean() ? HistogramGroupConfigTests.randomHistogramGroupConfig() : null;
        TermsGroupConfig terms = randomBoolean() ? TermsGroupConfigTests.randomTermsGroupConfig() : null;
        return new GroupConfig(dateHistogram, histogram, terms);
    }
}
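One detail these tests pin down: GroupConfig.validate() aggregates the errors of its nested group configs rather than short-circuiting, which is why an invalid date histogram surfaces two messages through the parent. A sketch grounded in testValidateDateHistogramGroupConfigWithErrors above:

    GroupConfig invalid = new GroupConfig(new DateHistogramGroupConfig(null, null, null, null));
    invalid.validate().ifPresent(e ->
        e.validationErrors().forEach(System.out::println)); // "Field name is required", "Interval is required"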
@@ -0,0 +1,109 @@ (new file: HistogramGroupConfigTests.java)
/* [Apache License 2.0 header, identical to the file above] */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class HistogramGroupConfigTests extends AbstractXContentTestCase<HistogramGroupConfig> {

    @Override
    protected HistogramGroupConfig createTestInstance() {
        return randomHistogramGroupConfig();
    }

    @Override
    protected HistogramGroupConfig doParseInstance(final XContentParser parser) throws IOException {
        return HistogramGroupConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullFields() {
        final HistogramGroupConfig config = new HistogramGroupConfig(60L);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
    }

    public void testValidateEmptyFields() {
        final HistogramGroupConfig config = new HistogramGroupConfig(60L, Strings.EMPTY_ARRAY);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
    }

    public void testValidateNegativeInterval() {
        final HistogramGroupConfig config = new HistogramGroupConfig(-1L, randomHistogramGroupConfig().getFields());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Interval must be a positive long")));
    }

    public void testValidateZeroInterval() {
        final HistogramGroupConfig config = new HistogramGroupConfig(0L, randomHistogramGroupConfig().getFields());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Interval must be a positive long")));
    }

    public void testValidate() {
        final HistogramGroupConfig config = randomHistogramGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static HistogramGroupConfig randomHistogramGroupConfig() {
        final long interval = randomNonNegativeLong();
        final String[] fields = new String[randomIntBetween(1, 10)];
        for (int i = 0; i < fields.length; i++) {
            fields[i] = randomAlphaOfLength(randomIntBetween(3, 10));
        }
        return new HistogramGroupConfig(interval, fields);
    }
}
@@ -0,0 +1,127 @@ (new file: MetricConfigTests.java)
/* [Apache License 2.0 header, identical to the file above] */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class MetricConfigTests extends AbstractXContentTestCase<MetricConfig> {

    @Override
    protected MetricConfig createTestInstance() {
        return randomMetricConfig();
    }

    @Override
    protected MetricConfig doParseInstance(final XContentParser parser) throws IOException {
        return MetricConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullField() {
        final MetricConfig config = new MetricConfig(null, randomMetricConfig().getMetrics());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Field name is required")));
    }

    public void testValidateEmptyField() {
        final MetricConfig config = new MetricConfig("", randomMetricConfig().getMetrics());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Field name is required")));
    }

    public void testValidateNullListOfMetrics() {
        final MetricConfig config = new MetricConfig("field", null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Metrics must be a non-null, non-empty array of strings")));
    }

    public void testValidateEmptyListOfMetrics() {
        final MetricConfig config = new MetricConfig("field", Collections.emptyList());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Metrics must be a non-null, non-empty array of strings")));
    }

    public void testValidate() {
        final MetricConfig config = randomMetricConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static MetricConfig randomMetricConfig() {
        final List<String> metrics = new ArrayList<>();
        if (randomBoolean()) {
            metrics.add("min");
        }
        if (randomBoolean()) {
            metrics.add("max");
        }
        if (randomBoolean()) {
            metrics.add("sum");
        }
        if (randomBoolean()) {
            metrics.add("avg");
        }
        if (randomBoolean()) {
            metrics.add("value_count");
        }
        if (metrics.size() == 0) {
            metrics.add("min");
        }
        // large name so we don't accidentally collide
        return new MetricConfig(randomAlphaOfLengthBetween(15, 25), Collections.unmodifiableList(metrics));
    }
}
@@ -0,0 +1,308 @@ (new file: RollupJobConfigTests.java)
/* [Apache License 2.0 header, identical to the file above] */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import static java.util.Collections.singletonList;
import static java.util.Collections.unmodifiableList;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class RollupJobConfigTests extends AbstractXContentTestCase<RollupJobConfig> {

    private String id;

    @Before
    public void setUpOptionalId() {
        id = randomAlphaOfLengthBetween(1, 10);
    }

    @Override
    protected RollupJobConfig createTestInstance() {
        return randomRollupJobConfig(id);
    }

    @Override
    protected RollupJobConfig doParseInstance(final XContentParser parser) throws IOException {
        return RollupJobConfig.fromXContent(parser, randomBoolean() ? id : null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullId() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(null, sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Id must be a non-null, non-empty string"));
    }

    public void testValidateEmptyId() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig("", sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Id must be a non-null, non-empty string"));
    }

    public void testValidateNullIndexPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), null, sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Index pattern must be a non-null, non-empty string"));
    }

    public void testValidateEmptyIndexPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), "", sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Index pattern must be a non-null, non-empty string"));
    }

    public void testValidateMatchAllIndexPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), "*", sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(),
            contains("Index pattern must not match all indices (as it would match it's own rollup index"));
    }

    public void testValidateIndexPatternMatchesRollupIndex() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), "rollup*", "rollup", sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Index pattern would match rollup index name which is not allowed"));
    }

    public void testValidateSameIndexAndRollupPatterns() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), "test", "test", sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Rollup index may not be the same as the index pattern"));
    }

    public void testValidateNullRollupPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), null, sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Rollup index must be a non-null, non-empty string"));
    }

    public void testValidateEmptyRollupPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), "", sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Rollup index must be a non-null, non-empty string"));
    }

    public void testValidateNullCron() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), null,
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Cron schedule must be a non-null, non-empty string"));
    }

    public void testValidateEmptyCron() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), "",
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Cron schedule must be a non-null, non-empty string"));
    }

    public void testValidatePageSize() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
            sample.getCron(), 0, sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Page size is mandatory and must be a positive long"));
    }

    public void testValidateGroupOrMetrics() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
            sample.getCron(), sample.getPageSize(), null, null, sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("At least one grouping or metric must be configured"));
    }

    public void testValidateGroupConfigWithErrors() {
        final GroupConfig groupConfig = new GroupConfig(null);

        final RollupJobConfig sample = randomRollupJobConfig(id);
        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
            sample.getCron(), sample.getPageSize(), groupConfig, sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Date histogram must not be null"));
    }

    public void testValidateListOfMetricsWithErrors() {
        final List<MetricConfig> metricsConfigs = singletonList(new MetricConfig(null, null));

        final RollupJobConfig sample = randomRollupJobConfig(id);
        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
            sample.getCron(), sample.getPageSize(), sample.getGroupConfig(), metricsConfigs, sample.getTimeout());
|
||||||
|
|
||||||
|
Optional<ValidationException> validation = config.validate();
|
||||||
|
assertThat(validation, notNullValue());
|
||||||
|
assertThat(validation.isPresent(), is(true));
|
||||||
|
ValidationException validationException = validation.get();
|
||||||
|
assertThat(validationException.validationErrors().size(), is(2));
|
||||||
|
assertThat(validationException.validationErrors(),
|
||||||
|
containsInAnyOrder("Field name is required", "Metrics must be a non-null, non-empty array of strings"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public static RollupJobConfig randomRollupJobConfig(final String id) {
|
||||||
|
final String indexPattern = randomAlphaOfLengthBetween(5, 20);
|
||||||
|
final String rollupIndex = "rollup_" + indexPattern;
|
||||||
|
final String cron = randomCron();
|
||||||
|
final int pageSize = randomIntBetween(1, 100);
|
||||||
|
final TimeValue timeout = randomBoolean() ? null :
|
||||||
|
new TimeValue(randomIntBetween(0, 60), randomFrom(Arrays.asList(TimeUnit.MILLISECONDS, TimeUnit.SECONDS, TimeUnit.MINUTES)));
|
||||||
|
final GroupConfig groups = GroupConfigTests.randomGroupConfig();
|
||||||
|
|
||||||
|
final List<MetricConfig> metrics = new ArrayList<>();
|
||||||
|
if (randomBoolean()) {
|
||||||
|
final int numMetrics = randomIntBetween(1, 10);
|
||||||
|
for (int i = 0; i < numMetrics; i++) {
|
||||||
|
metrics.add(MetricConfigTests.randomMetricConfig());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, unmodifiableList(metrics), timeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String randomCron() {
|
||||||
|
return (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 59))) + //second
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 59))) + //minute
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 23))) + //hour
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1, 31))) + //day of month
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1, 12))) + //month
|
||||||
|
" ?" + //day of week
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1970, 2199))); //year
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,87 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class TermsGroupConfigTests extends AbstractXContentTestCase<TermsGroupConfig> {

    @Override
    protected TermsGroupConfig createTestInstance() {
        return randomTermsGroupConfig();
    }

    @Override
    protected TermsGroupConfig doParseInstance(final XContentParser parser) throws IOException {
        return TermsGroupConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullFields() {
        final TermsGroupConfig config = new TermsGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
    }

    public void testValidateEmptyFields() {
        final TermsGroupConfig config = new TermsGroupConfig(Strings.EMPTY_ARRAY);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
    }

    public void testValidate() {
        final TermsGroupConfig config = randomTermsGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static TermsGroupConfig randomTermsGroupConfig() {
        final String[] fields = new String[randomIntBetween(1, 10)];
        for (int i = 0; i < fields.length; i++) {
            fields[i] = randomAlphaOfLength(randomIntBetween(3, 10));
        }
        return new TermsGroupConfig(fields);
    }
}

@ -36,6 +36,7 @@ public class DeadHostStateTests extends RestClientTestCase {
     private static long[] EXPECTED_TIMEOUTS_SECONDS = new long[]{60, 84, 120, 169, 240, 339, 480, 678, 960, 1357, 1800};

     public void testInitialDeadHostStateDefaultTimeSupplier() {
+        assumeFalse("https://github.com/elastic/elasticsearch/issues/33747", System.getProperty("os.name").startsWith("Windows"));
         DeadHostState deadHostState = new DeadHostState(DeadHostState.TimeSupplier.DEFAULT);
         long currentTime = System.nanoTime();
         assertThat(deadHostState.getDeadUntilNanos(), greaterThan(currentTime));

@ -54,6 +55,7 @@ public class DeadHostStateTests extends RestClientTestCase {
     }

     public void testCompareToDefaultTimeSupplier() {
+        assumeFalse("https://github.com/elastic/elasticsearch/issues/33747", System.getProperty("os.name").startsWith("Windows"));
         int numObjects = randomIntBetween(EXPECTED_TIMEOUTS_SECONDS.length, 30);
         DeadHostState[] deadHostStates = new DeadHostState[numObjects];
         for (int i = 0; i < numObjects; i++) {

@ -55,7 +55,6 @@ integTestCluster {
   setting 'reindex.remote.whitelist', '127.0.0.1:*'

   // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults
-  systemProperty 'es.scripting.use_java_time', 'false'
   systemProperty 'es.scripting.update.ctx_in_params', 'false'
   //TODO: remove this once the cname is prepended to the address by default in 7.0
   systemProperty 'es.http.cname_in_publish_address', 'true'

@ -0,0 +1,56 @@
[[java-rest-high-x-pack-ml-get-datafeed]]
=== Get Datafeed API

The Get Datafeed API provides the ability to get {ml} datafeeds in the cluster.
It accepts a `GetDatafeedRequest` object and responds
with a `GetDatafeedResponse` object.

[[java-rest-high-x-pack-ml-get-datafeed-request]]
==== Get Datafeed Request

A `GetDatafeedRequest` object can have any number of `datafeedId` entries.
However, they all must be non-null. An empty list is the same as requesting all datafeeds.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-request]
--------------------------------------------------
<1> Constructing a new request referencing existing `datafeedIds`; it can contain wildcards
<2> Whether to ignore if a wildcard expression matches no datafeeds
(this includes the `_all` string or when no datafeeds have been specified)
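
For illustration only, constructing such a request might look like the following
minimal sketch. The varargs constructor and the `setAllowNoDatafeeds` setter name
are assumptions inferred from the callouts above, not taken from the tagged test:

["source","java"]
--------------------------------------------------
// reference a concrete datafeed and a wildcard pattern
GetDatafeedRequest request = new GetDatafeedRequest("datafeed-1", "datafeed-logs-*");
// do not fail when the wildcard expression matches no datafeeds (assumed setter name)
request.setAllowNoDatafeeds(true);
--------------------------------------------------
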
[[java-rest-high-x-pack-ml-get-datafeed-execution]]
==== Execution

The request can be executed through the `MachineLearningClient` contained
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-execute]
--------------------------------------------------
<1> The count of retrieved datafeeds
<2> The retrieved datafeeds

[[java-rest-high-x-pack-ml-get-datafeed-execution-async]]
==== Asynchronous Execution

The request can also be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-execute-async]
--------------------------------------------------
<1> The `GetDatafeedRequest` to execute and the `ActionListener` to use when
the execution completes

The method does not block and returns immediately. The passed `ActionListener` is used
to notify the caller of completion. A typical `ActionListener` for `GetDatafeedResponse` may
look like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-listener]
--------------------------------------------------
<1> `onResponse` is called back when the action is completed successfully
<2> `onFailure` is called back when some unexpected error occurs
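
In other words, such a listener is a small anonymous class along these lines
(a sketch; the handler bodies are placeholders rather than the documented test code):

["source","java"]
--------------------------------------------------
ActionListener<GetDatafeedResponse> listener = new ActionListener<GetDatafeedResponse>() {
    @Override
    public void onResponse(GetDatafeedResponse response) {
        // called when the datafeeds were retrieved successfully
    }

    @Override
    public void onFailure(Exception e) {
        // called when the request failed with an unexpected error
    }
};
--------------------------------------------------
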
@ -0,0 +1,172 @@
[[java-rest-high-x-pack-rollup-put-job]]
=== Put Rollup Job API

The Put Rollup Job API can be used to create a new Rollup job
in the cluster. The API accepts a `PutRollupJobRequest` object
as a request and returns a `PutRollupJobResponse`.

[[java-rest-high-x-pack-rollup-put-rollup-job-request]]
==== Put Rollup Job Request

A `PutRollupJobRequest` requires the following argument:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-request]
--------------------------------------------------
<1> The configuration of the Rollup job to create as a `RollupJobConfig`

[[java-rest-high-x-pack-rollup-put-rollup-job-config]]
==== Rollup Job Configuration

The `RollupJobConfig` object contains all the details about the rollup job
configuration. See {ref}/rollup-job-config.html[Rollup configuration] to learn more
about the various configuration settings.

A `RollupJobConfig` requires the following arguments:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-config]
--------------------------------------------------
<1> The name of the Rollup job
<2> The index (or index pattern) to roll up
<3> The index to store rollup results into
<4> A cron expression which defines when the Rollup job should be executed
<5> The page size to use for the Rollup job
<6> The grouping configuration of the Rollup job as a `GroupConfig`
<7> The metrics configuration of the Rollup job as a list of `MetricConfig`
<8> The timeout value to use for the Rollup job as a `TimeValue`
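
Reusing the constructor order exercised by the client's own tests, a complete
configuration can be sketched as follows (the `groups` and `metrics` variables
are built as shown in the next two sections; all values are illustrative):

["source","java"]
--------------------------------------------------
RollupJobConfig config = new RollupJobConfig(
    "my-rollup-job",                 // the name of the Rollup job
    "sensor-*",                      // the index pattern to roll up
    "sensor_rollup",                 // the index that stores the rollup results
    "*/30 * * * * ?",                // cron expression defining when the job runs
    100,                             // the page size
    groups,                          // the GroupConfig
    metrics,                         // the List<MetricConfig>
    TimeValue.timeValueSeconds(20)); // the timeout, which may be null
--------------------------------------------------
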
[[java-rest-high-x-pack-rollup-put-rollup-job-group-config]]
==== Grouping Configuration

The grouping configuration of the Rollup job is defined in the `RollupJobConfig`
using a `GroupConfig` instance. `GroupConfig` reflects all the configuration
settings that can be defined using the REST API. See {ref}/rollup-job-config.html#rollup-groups-config[Grouping Config]
to learn more about these settings.

Using the REST API, we could define this grouping configuration:

[source,js]
--------------------------------------------------
"groups" : {
  "date_histogram": {
    "field": "timestamp",
    "interval": "1h",
    "delay": "7d",
    "time_zone": "UTC"
  },
  "terms": {
    "fields": ["hostname", "datacenter"]
  },
  "histogram": {
    "fields": ["load", "net_in", "net_out"],
    "interval": 5
  }
}
--------------------------------------------------
// NOTCONSOLE

Using the `GroupConfig` object and the high level REST client, the same
configuration would be:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-group-config]
--------------------------------------------------
<1> The date histogram aggregation to use to roll up documents, as a `DateHistogramGroupConfig`
<2> The terms aggregation to use to roll up documents, as a `TermsGroupConfig`
<3> The histogram aggregation to use to roll up documents, as a `HistogramGroupConfig`
<4> The grouping configuration as a `GroupConfig`
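
A hedged sketch of that Java configuration follows. The `TermsGroupConfig` varargs
constructor is the one exercised by the tests above; the `DateHistogramGroupConfig`,
`HistogramGroupConfig`, and `GroupConfig` constructor shapes are assumptions:

["source","java"]
--------------------------------------------------
// date histogram on the "timestamp" field: 1h interval, 7d delay, UTC time zone (assumed signature)
DateHistogramGroupConfig dateHistogram =
    new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC");
// terms grouping on the hostname and datacenter fields
TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter");
// numeric histogram with an interval of 5 on the load, net_in and net_out fields (assumed argument order)
HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load", "net_in", "net_out");
// combine the three groupings (assumed constructor order)
GroupConfig groups = new GroupConfig(dateHistogram, histogram, terms);
--------------------------------------------------
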
[[java-rest-high-x-pack-rollup-put-rollup-job-metrics-config]]
==== Metrics Configuration

After defining which groups should be generated for the data, you next configure
which metrics should be collected. The list of metrics is defined in the `RollupJobConfig`
using a `List<MetricConfig>` instance. `MetricConfig` reflects all the configuration
settings that can be defined using the REST API. See {ref}/rollup-job-config.html#rollup-metrics-config[Metrics Config]
to learn more about these settings.

Using the REST API, we could define this metrics configuration:

[source,js]
--------------------------------------------------
"metrics": [
  {
    "field": "temperature",
    "metrics": ["min", "max", "sum"]
  },
  {
    "field": "voltage",
    "metrics": ["avg", "value_count"]
  }
]
--------------------------------------------------
// NOTCONSOLE

Using the `MetricConfig` object and the high level REST client, the same
configuration would be:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-metrics-config]
--------------------------------------------------
<1> The list of `MetricConfig` to configure in the `RollupJobConfig`
<2> Adds the metrics to compute on the `temperature` field
<3> Adds the metrics to compute on the `voltage` field
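
In plain Java this amounts to the following sketch, using the two-argument
`MetricConfig(field, metrics)` constructor that the validation tests above exercise
(assuming the usual `java.util` imports):

["source","java"]
--------------------------------------------------
List<MetricConfig> metrics = new ArrayList<>();
// min, max and sum of the temperature field
metrics.add(new MetricConfig("temperature", Arrays.asList("min", "max", "sum")));
// average and value count of the voltage field
metrics.add(new MetricConfig("voltage", Arrays.asList("avg", "value_count")));
--------------------------------------------------
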
[[java-rest-high-x-pack-rollup-put-rollup-job-execution]]
==== Execution

The Put Rollup Job API can be executed through a `RollupClient`
instance. Such an instance can be retrieved from a `RestHighLevelClient`
using the `rollup()` method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-execute]
--------------------------------------------------
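
As a rough sketch (the `putRollupJob` method name and the `RequestOptions.DEFAULT`
argument are assumptions about the client API, not taken from the tagged test):

["source","java"]
--------------------------------------------------
// wrap the RollupJobConfig built above in a request and execute it
PutRollupJobRequest request = new PutRollupJobRequest(config);
PutRollupJobResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
--------------------------------------------------
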
[[java-rest-high-x-pack-rollup-put-rollup-job-response]]
==== Response

The returned `PutRollupJobResponse` indicates if the new Rollup job
has been successfully created:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-response]
--------------------------------------------------
<1> `acknowledged` is a boolean indicating whether the job was successfully created

[[java-rest-high-x-pack-rollup-put-rollup-job-async]]
==== Asynchronous Execution

This request can be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-execute-async]
--------------------------------------------------
<1> The `PutRollupJobRequest` to execute and the `ActionListener` to use when
the execution completes

The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for `PutRollupJobResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument

@ -221,6 +221,7 @@ The Java High Level REST Client supports the following Machine Learning APIs:
 * <<java-rest-high-x-pack-ml-update-job>>
 * <<java-rest-high-x-pack-ml-get-job-stats>>
 * <<java-rest-high-x-pack-ml-put-datafeed>>
+* <<java-rest-high-x-pack-ml-get-datafeed>>
 * <<java-rest-high-x-pack-ml-delete-datafeed>>
 * <<java-rest-high-x-pack-ml-forecast-job>>
 * <<java-rest-high-x-pack-ml-delete-forecast>>

@ -240,6 +241,7 @@ include::ml/close-job.asciidoc[]
 include::ml/update-job.asciidoc[]
 include::ml/flush-job.asciidoc[]
 include::ml/put-datafeed.asciidoc[]
+include::ml/get-datafeed.asciidoc[]
 include::ml/delete-datafeed.asciidoc[]
 include::ml/get-job-stats.asciidoc[]
 include::ml/forecast-job.asciidoc[]

@ -260,6 +262,14 @@ The Java High Level REST Client supports the following Migration APIs:

 include::migration/get-assistance.asciidoc[]

+== Rollup APIs
+
+The Java High Level REST Client supports the following Rollup APIs:
+
+* <<java-rest-high-x-pack-rollup-put-job>>
+
+include::rollup/put_job.asciidoc[]
+
 == Security APIs

 The Java High Level REST Client supports the following Security APIs:

@ -220,11 +220,6 @@ GET hockey/_search
 }
 ----------------------------------------------------------------
 // CONSOLE
-// TEST[warning:The joda time api for doc values is deprecated. Use -Des.scripting.use_java_time=true to use the java time api for date field doc values]
-
-NOTE: Date fields are changing in 7.0 to be exposed as `ZonedDateTime`
-from Java 8's time API. To switch to this functionality early,
-add `-Des.scripting.use_java_time=true` to `jvm.options`.

 [float]
 [[modules-scripting-painless-regex]]

@ -416,7 +416,7 @@ POST /sales/_search?size=0
       "terms": {
         "script": {
           "lang": "painless",
-          "source": "doc['date'].value.dayOfWeek"
+          "source": "doc['date'].value.dayOfWeekEnum.value"
         }
       }
     }

@ -425,7 +425,6 @@ POST /sales/_search?size=0
 --------------------------------------------------
 // CONSOLE
 // TEST[setup:sales]
-// TEST[warning:The joda time api for doc values is deprecated. Use -Des.scripting.use_java_time=true to use the java time api for date field doc values]

 Response:

|
||||||
|
|
||||||
integTestCluster {
|
integTestCluster {
|
||||||
module project.project(':modules:mapper-extras')
|
module project.project(':modules:mapper-extras')
|
||||||
systemProperty 'es.scripting.use_java_time', 'true'
|
|
||||||
systemProperty 'es.scripting.update.ctx_in_params', 'false'
|
systemProperty 'es.scripting.update.ctx_in_params', 'false'
|
||||||
systemProperty 'es.http.cname_in_publish_address', 'true'
|
systemProperty 'es.http.cname_in_publish_address', 'true'
|
||||||
}
|
}
|
||||||
|
|
|
@ -48,8 +48,7 @@ public final class Whitelist {
         "java.util.txt",
         "java.util.function.txt",
         "java.util.regex.txt",
-        "java.util.stream.txt",
-        "joda.time.txt"
+        "java.util.stream.txt"
     };

     public static final List<Whitelist> BASE_WHITELISTS =

@ -1,60 +0,0 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

#
# Painless definition file. This defines the hierarchy of classes,
# what methods and fields they have, etc.
#

# NOTE: this just minimal whitelisting of joda time, just to provide
# convenient access via the scripting API. classes are fully qualified to avoid
# any confusion with java.time

class org.joda.time.ReadableInstant {
  boolean equals(Object)
  long getMillis()
  int hashCode()
  boolean isAfter(org.joda.time.ReadableInstant)
  boolean isBefore(org.joda.time.ReadableInstant)
  boolean isEqual(org.joda.time.ReadableInstant)
  String toString()
}

class org.joda.time.ReadableDateTime {
  int getCenturyOfEra()
  int getDayOfMonth()
  int getDayOfWeek()
  int getDayOfYear()
  int getEra()
  int getHourOfDay()
  int getMillisOfDay()
  int getMillisOfSecond()
  int getMinuteOfDay()
  int getMinuteOfHour()
  int getMonthOfYear()
  int getSecondOfDay()
  int getSecondOfMinute()
  int getWeekOfWeekyear()
  int getWeekyear()
  int getYear()
  int getYearOfCentury()
  int getYearOfEra()
  String toString(String)
  String toString(String,Locale)
}

@ -76,9 +76,93 @@ class org.elasticsearch.index.fielddata.ScriptDocValues$Longs {
   List getValues()
 }

+class org.elasticsearch.script.JodaCompatibleZonedDateTime {
+  ##### ZonedDateTime methods
+  int getDayOfMonth()
+  int getDayOfYear()
+  int getHour()
+  LocalDate toLocalDate()
+  LocalDateTime toLocalDateTime()
+  int getMinute()
+  Month getMonth()
+  int getMonthValue()
+  int getNano()
+  int getSecond()
+  int getYear()
+  ZonedDateTime minus(TemporalAmount)
+  ZonedDateTime minus(long,TemporalUnit)
+  ZonedDateTime minusYears(long)
+  ZonedDateTime minusMonths(long)
+  ZonedDateTime minusWeeks(long)
+  ZonedDateTime minusDays(long)
+  ZonedDateTime minusHours(long)
+  ZonedDateTime minusMinutes(long)
+  ZonedDateTime minusSeconds(long)
+  ZonedDateTime minusNanos(long)
+  ZonedDateTime plus(TemporalAmount)
+  ZonedDateTime plus(long,TemporalUnit)
+  ZonedDateTime plusDays(long)
+  ZonedDateTime plusHours(long)
+  ZonedDateTime plusMinutes(long)
+  ZonedDateTime plusMonths(long)
+  ZonedDateTime plusNanos(long)
+  ZonedDateTime plusSeconds(long)
+  ZonedDateTime plusWeeks(long)
+  ZonedDateTime plusYears(long)
+  Instant toInstant()
+  OffsetDateTime toOffsetDateTime()
+  ZonedDateTime truncatedTo(TemporalUnit)
+  ZonedDateTime with(TemporalAdjuster)
+  ZonedDateTime with(TemporalField,long)
+  ZonedDateTime withDayOfMonth(int)
+  ZonedDateTime withDayOfYear(int)
+  ZonedDateTime withEarlierOffsetAtOverlap()
+  ZonedDateTime withFixedOffsetZone()
+  ZonedDateTime withHour(int)
+  ZonedDateTime withLaterOffsetAtOverlap()
+  ZonedDateTime withMinute(int)
+  ZonedDateTime withMonth(int)
+  ZonedDateTime withNano(int)
+  ZonedDateTime withSecond(int)
+  ZonedDateTime withYear(int)
+  ZonedDateTime withZoneSameLocal(ZoneId)
+  ZonedDateTime withZoneSameInstant(ZoneId)
+
+  #### Joda methods that exist in java time
+  boolean equals(Object)
+  int hashCode()
+  boolean isAfter(ZonedDateTime)
+  boolean isBefore(ZonedDateTime)
+  boolean isEqual(ZonedDateTime)
+  String toString()
+
+  #### Joda time methods
+  long getMillis()
+  int getCenturyOfEra()
+  int getEra()
+  int getHourOfDay()
+  int getMillisOfDay()
+  int getMillisOfSecond()
+  int getMinuteOfDay()
+  int getMinuteOfHour()
+  int getMonthOfYear()
+  int getSecondOfDay()
+  int getSecondOfMinute()
+  int getWeekOfWeekyear()
+  int getWeekyear()
+  int getYearOfCentury()
+  int getYearOfEra()
+  String toString(String)
+  String toString(String,Locale)
+
+  # conflicting methods
+  DayOfWeek getDayOfWeekEnum()
+  int getDayOfWeek()
+}
+
 class org.elasticsearch.index.fielddata.ScriptDocValues$Dates {
-  Object get(int)
-  Object getValue()
+  JodaCompatibleZonedDateTime get(int)
+  JodaCompatibleZonedDateTime getValue()
   List getValues()
 }

@ -19,10 +19,8 @@

 package org.elasticsearch.painless;

-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-
 import java.lang.invoke.LambdaConversionException;
+import java.time.Instant;

 import static java.util.Collections.singletonMap;
 import static org.hamcrest.Matchers.containsString;

@ -59,15 +57,15 @@ public class FunctionRefTests extends ScriptTestCase {
     public void testQualifiedVirtualMethodReference() {
         long instant = randomLong();
         assertEquals(instant, exec(
-                "List l = [params.d]; return l.stream().mapToLong(org.joda.time.ReadableDateTime::getMillis).sum()",
-                singletonMap("d", new DateTime(instant, DateTimeZone.UTC)), true));
+                "List l = [params.d]; return l.stream().mapToLong(Instant::toEpochMilli).sum()",
+                singletonMap("d", Instant.ofEpochMilli(instant)), true));
     }

     public void testQualifiedVirtualMethodReferenceDef() {
         long instant = randomLong();
         assertEquals(instant, exec(
-                "def l = [params.d]; return l.stream().mapToLong(org.joda.time.ReadableDateTime::getMillis).sum()",
-                singletonMap("d", new DateTime(instant, DateTimeZone.UTC)), true));
+                "def l = [params.d]; return l.stream().mapToLong(Instant::toEpochMilli).sum()",
+                singletonMap("d", Instant.ofEpochMilli(instant)), true));
     }

     public void testCtorMethodReference() {

@ -197,10 +195,10 @@ public class FunctionRefTests extends ScriptTestCase {

     public void testQualifiedMethodMissing() {
         Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
-            exec("List l = [2, 1]; l.sort(org.joda.time.ReadableDateTime::bogus); return l.get(0);", false);
+            exec("List l = [2, 1]; l.sort(java.time.Instant::bogus); return l.get(0);", false);
         });
         assertThat(e.getMessage(),
-                containsString("function reference [org.joda.time.ReadableDateTime::bogus/2] matching [java.util.Comparator"));
+                containsString("function reference [java.time.Instant::bogus/2] matching [java.util.Comparator, compare/2"));
     }

     public void testClassMissing() {

@ -108,7 +108,7 @@ setup:
           script_fields:
             bar:
               script:
-                source: "doc.date.value.dayOfWeek.value"
+                source: "doc.date.value.dayOfWeekEnum.value"

   - match: { hits.hits.0.fields.bar.0: 7}

@ -123,7 +123,7 @@ setup:
                 source: >
                   StringBuilder b = new StringBuilder();
                   for (def date : doc.dates) {
-                    b.append(" ").append(date.getDayOfWeek().value);
+                    b.append(" ").append(date.getDayOfWeekEnum().value);
                   }
                   return b.toString().trim()

@ -42,17 +42,12 @@ public class DiffableStringMap extends AbstractMap<String, String> implements Diffable<DiffableStringMap> {
     private final Map<String, String> innerMap;

     DiffableStringMap(final Map<String, String> map) {
-        this.innerMap = map;
+        this.innerMap = Collections.unmodifiableMap(map);
     }

     @SuppressWarnings("unchecked")
     DiffableStringMap(final StreamInput in) throws IOException {
-        this.innerMap = (Map<String, String>) (Map) in.readMap();
-    }
-
-    @Override
-    public String put(String key, String value) {
-        return innerMap.put(key, value);
+        this((Map<String, String>) (Map) in.readMap());
     }

     @Override

@ -75,32 +70,6 @@ public class DiffableStringMap extends AbstractMap<String, String> implements Diffable<DiffableStringMap> {
         return new DiffableStringMapDiff(in);
     }

-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (obj instanceof DiffableStringMap) {
-            DiffableStringMap other = (DiffableStringMap) obj;
-            return innerMap.equals(other.innerMap);
-        } else if (obj instanceof Map) {
-            Map other = (Map) obj;
-            return innerMap.equals(other);
-        } else {
-            return false;
-        }
-    }
-
-    @Override
-    public int hashCode() {
-        return innerMap.hashCode();
-    }
-
-    @Override
-    public String toString() {
-        return "DiffableStringMap[" + innerMap.toString() + "]";
-    }
-
     /**
      * Represents differences between two DiffableStringMaps.
      */

@ -466,7 +466,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
     }

     public Map<String, String> getCustomData(final String key) {
-        return Collections.unmodifiableMap(this.customData.get(key));
+        return this.customData.get(key);
     }

     public ImmutableOpenIntMap<Set<String>> getInSyncAllocationIds() {

@ -61,6 +61,11 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider {

     @Override
     public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) {
+        return canRebalance(allocation);
+    }
+
+    @Override
+    public Decision canRebalance(RoutingAllocation allocation) {
         if (clusterConcurrentRebalance == -1) {
             return allocation.decision(Decision.YES, NAME, "unlimited concurrent rebalances are allowed");
         }

@ -121,6 +121,7 @@ public class DiskThresholdDecider extends AllocationDecider {
         // Cache the used disk percentage for displaying disk percentages consistent with documentation
         double usedDiskPercentage = usage.getUsedDiskAsPercentage();
         long freeBytes = usage.getFreeBytes();
+        ByteSizeValue freeBytesValue = new ByteSizeValue(freeBytes);
         if (logger.isTraceEnabled()) {
             logger.trace("node [{}] has {}% used disk", node.nodeId(), usedDiskPercentage);
         }

@ -134,22 +135,22 @@ public class DiskThresholdDecider extends AllocationDecider {
         if (freeBytes < diskThresholdSettings.getFreeBytesThresholdLow().getBytes()) {
             if (skipLowTresholdChecks == false) {
                 if (logger.isDebugEnabled()) {
-                    logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, preventing allocation",
-                        diskThresholdSettings.getFreeBytesThresholdLow(), freeBytes, node.nodeId());
+                    logger.debug("less than the required {} free bytes threshold ({} free) on node {}, preventing allocation",
+                        diskThresholdSettings.getFreeBytesThresholdLow(), freeBytesValue, node.nodeId());
                 }
                 return allocation.decision(Decision.NO, NAME,
                     "the node is above the low watermark cluster setting [%s=%s], having less than the minimum required [%s] free " +
                     "space, actual free: [%s]",
                     CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(),
                     diskThresholdSettings.getLowWatermarkRaw(),
-                    diskThresholdSettings.getFreeBytesThresholdLow(), new ByteSizeValue(freeBytes));
+                    diskThresholdSettings.getFreeBytesThresholdLow(), freeBytesValue);
             } else if (freeBytes > diskThresholdSettings.getFreeBytesThresholdHigh().getBytes()) {
                 // Allow the shard to be allocated because it is primary that
                 // has never been allocated if it's under the high watermark
                 if (logger.isDebugEnabled()) {
-                    logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " +
+                    logger.debug("less than the required {} free bytes threshold ({} free) on node {}, " +
                         "but allowing allocation because primary has never been allocated",
-                        diskThresholdSettings.getFreeBytesThresholdLow(), freeBytes, node.nodeId());
+                        diskThresholdSettings.getFreeBytesThresholdLow(), freeBytesValue, node.nodeId());
                 }
                 return allocation.decision(Decision.YES, NAME,
                     "the node is above the low watermark, but less than the high watermark, and this primary shard has " +

@ -158,16 +159,16 @@ public class DiskThresholdDecider extends AllocationDecider {
             // Even though the primary has never been allocated, the node is
             // above the high watermark, so don't allow allocating the shard
             if (logger.isDebugEnabled()) {
-                logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " +
+                logger.debug("less than the required {} free bytes threshold ({} free) on node {}, " +
                     "preventing allocation even though primary has never been allocated",
-                    diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytes, node.nodeId());
+                    diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytesValue, node.nodeId());
             }
             return allocation.decision(Decision.NO, NAME,
                 "the node is above the high watermark cluster setting [%s=%s], having less than the minimum required [%s] free " +
                 "space, actual free: [%s]",
                 CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(),
                 diskThresholdSettings.getHighWatermarkRaw(),
-                diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytes));
+                diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytesValue);
         }
     }

@ -219,15 +220,16 @@ public class DiskThresholdDecider extends AllocationDecider {
         double freeSpaceAfterShard = freeDiskPercentageAfterShardAssigned(usage, shardSize);
         long freeBytesAfterShard = freeBytes - shardSize;
         if (freeBytesAfterShard < diskThresholdSettings.getFreeBytesThresholdHigh().getBytes()) {
-            logger.warn("after allocating, node [{}] would have less than the required " +
-                "{} free bytes threshold ({} bytes free), preventing allocation",
-                node.nodeId(), diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytesAfterShard);
+            logger.warn("after allocating, node [{}] would have less than the required threshold of " +
+                "{} free (currently {} free, estimated shard size is {}), preventing allocation",
+                node.nodeId(), diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytesValue, new ByteSizeValue(shardSize));
             return allocation.decision(Decision.NO, NAME,
                 "allocating the shard to this node will bring the node above the high watermark cluster setting [%s=%s] " +
-                "and cause it to have less than the minimum required [%s] of free space (free bytes after shard added: [%s])",
+                "and cause it to have less than the minimum required [%s] of free space (free: [%s], estimated shard size: [%s])",
                 CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(),
                 diskThresholdSettings.getHighWatermarkRaw(),
-                diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytesAfterShard));
+                diskThresholdSettings.getFreeBytesThresholdHigh(),
+                freeBytesValue, new ByteSizeValue(shardSize));
         }
         if (freeSpaceAfterShard < diskThresholdSettings.getFreeDiskThresholdHigh()) {
             logger.warn("after allocating, node [{}] would have more than the allowed " +

@ -243,7 +245,7 @@ public class DiskThresholdDecider extends AllocationDecider {

         return allocation.decision(Decision.YES, NAME,
             "enough disk for shard on node, free: [%s], shard size: [%s], free after allocating shard: [%s]",
-            new ByteSizeValue(freeBytes),
+            freeBytesValue,
             new ByteSizeValue(shardSize),
             new ByteSizeValue(freeBytesAfterShard));
     }

@ -39,6 +39,7 @@ import org.elasticsearch.common.io.stream.Writeable.Writer;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
+import org.elasticsearch.script.JodaCompatibleZonedDateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.ReadableInstant;

@ -680,6 +681,15 @@ public abstract class StreamOutput extends OutputStream {
             o.writeString(zonedDateTime.getZone().getId());
             o.writeLong(zonedDateTime.toInstant().toEpochMilli());
         });
+        writers.put(JodaCompatibleZonedDateTime.class, (o, v) -> {
+            // write the joda compatibility datetime as joda datetime
+            o.writeByte((byte) 13);
+            final JodaCompatibleZonedDateTime zonedDateTime = (JodaCompatibleZonedDateTime) v;
+            String zoneId = zonedDateTime.getZonedDateTime().getZone().getId();
+            // joda does not understand "Z" for utc, so we must special case
+            o.writeString(zoneId.equals("Z") ? DateTimeZone.UTC.getID() : zoneId);
+            o.writeLong(zonedDateTime.toInstant().toEpochMilli());
+        });
         WRITERS = Collections.unmodifiableMap(writers);
     }

@ -25,6 +25,7 @@ import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.script.JodaCompatibleZonedDateTime;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Instant;

@ -93,6 +94,7 @@ public class XContentElasticsearchExtension implements XContentBuilderExtension {
         writers.put(Year.class, (b, v) -> b.value(v.toString()));
         writers.put(Duration.class, (b, v) -> b.value(v.toString()));
         writers.put(Period.class, (b, v) -> b.value(v.toString()));
+        writers.put(JodaCompatibleZonedDateTime.class, XContentBuilder::timeValue);

         writers.put(BytesReference.class, (b, v) -> {
             if (v == null) {

@ -141,6 +143,8 @@ public class XContentElasticsearchExtension implements XContentBuilderExtension {
             d -> DEFAULT_FORMATTER.format(ZonedDateTime.ofInstant((java.time.Instant) d, ZoneOffset.UTC)));
         transformers.put(LocalDate.class, d -> ((LocalDate) d).toString());
         transformers.put(LocalTime.class, d -> LOCAL_TIME_FORMATTER.format((LocalTime) d));
+        transformers.put(JodaCompatibleZonedDateTime.class,
+            d -> DEFAULT_FORMATTER.format(((JodaCompatibleZonedDateTime) d).getZonedDateTime()));
         return transformers;
     }
 }

@ -28,10 +28,17 @@ import org.elasticsearch.common.lease.Releasable;
 public interface AtomicFieldData extends Accountable, Releasable {

     /**
-     * Returns a "scripting" based values.
+     * Returns field values for use in scripting.
      */
     ScriptDocValues<?> getScriptValues();

+    /**
+     * Returns field values for use by returned hits.
+     */
+    default ScriptDocValues<?> getLegacyFieldValues() {
+        return getScriptValues();
+    }
+
     /**
      * Return a String representation of the values.
      */

@@ -26,26 +26,17 @@ import org.apache.lucene.util.BytesRefBuilder;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLoggerFactory;
-import org.joda.time.DateTimeZone;
-import org.joda.time.MutableDateTime;
+import org.elasticsearch.script.JodaCompatibleZonedDateTime;
 
 import java.io.IOException;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
 import java.time.Instant;
 import java.time.ZoneOffset;
-import java.time.ZonedDateTime;
 import java.util.AbstractList;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
-import java.util.function.Consumer;
 import java.util.function.UnaryOperator;
 
-import static org.elasticsearch.common.Booleans.parseBoolean;
-
 /**
  * Script level doc values, the assumption is that any implementation will
  * implement a <code>getValue</code> and a <code>getValues</code> that return
@@ -147,55 +138,28 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
         }
     }
 
-    public static final class Dates extends ScriptDocValues<Object> {
+    public static final class Dates extends ScriptDocValues<JodaCompatibleZonedDateTime> {
 
-        /** Whether scripts should expose dates as java time objects instead of joda time. */
-        private static final boolean USE_JAVA_TIME = parseBoolean(System.getProperty("es.scripting.use_java_time"), false);
-
-        private static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Dates.class));
-
         private final SortedNumericDocValues in;
 
         /**
-         * Method call to add deprecation message. Normally this is
-         * {@link #deprecationLogger} but tests override.
+         * Values wrapped in {@link java.time.ZonedDateTime} objects.
          */
-        private final Consumer<String> deprecationCallback;
-
-        /**
-         * Whether java time or joda time should be used. This is normally {@link #USE_JAVA_TIME} but tests override it.
-         */
-        private final boolean useJavaTime;
-
-        /**
-         * Values wrapped in a date time object. The concrete type depends on the system property {@code es.scripting.use_java_time}.
-         * When that system property is {@code false}, the date time objects are of type {@link MutableDateTime}. When the system
-         * property is {@code true}, the date time objects are of type {@link java.time.ZonedDateTime}.
-         */
-        private Object[] dates;
+        private JodaCompatibleZonedDateTime[] dates;
         private int count;
 
         /**
         * Standard constructor.
         */
         public Dates(SortedNumericDocValues in) {
-            this(in, message -> deprecationLogger.deprecatedAndMaybeLog("scripting_joda_time_deprecation", message), USE_JAVA_TIME);
-        }
-
-        /**
-         * Constructor for testing with a deprecation callback.
-         */
-        Dates(SortedNumericDocValues in, Consumer<String> deprecationCallback, boolean useJavaTime) {
             this.in = in;
-            this.deprecationCallback = deprecationCallback;
-            this.useJavaTime = useJavaTime;
         }
 
         /**
          * Fetch the first field value or 0 millis after epoch if there are no
          * values.
          */
-        public Object getValue() {
+        public JodaCompatibleZonedDateTime getValue() {
             if (count == 0) {
                 throw new IllegalStateException("A document doesn't have a value for a field! " +
                     "Use doc[<field>].size()==0 to check if a document is missing a field!");
@@ -204,7 +168,7 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
         }
 
         @Override
-        public Object get(int index) {
+        public JodaCompatibleZonedDateTime get(int index) {
             if (index >= count) {
                 throw new IndexOutOfBoundsException(
                     "attempted to fetch the [" + index + "] date when there are only ["
@@ -235,41 +199,13 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
             if (count == 0) {
                 return;
             }
-            if (useJavaTime) {
-                if (dates == null || count > dates.length) {
-                    // Happens for the document. We delay allocating dates so we can allocate it with a reasonable size.
-                    dates = new ZonedDateTime[count];
-                }
-                for (int i = 0; i < count; ++i) {
-                    dates[i] = ZonedDateTime.ofInstant(Instant.ofEpochMilli(in.nextValue()), ZoneOffset.UTC);
-                }
-            } else {
-                deprecated("The joda time api for doc values is deprecated. Use -Des.scripting.use_java_time=true" +
-                    " to use the java time api for date field doc values");
-                if (dates == null || count > dates.length) {
-                    // Happens for the document. We delay allocating dates so we can allocate it with a reasonable size.
-                    dates = new MutableDateTime[count];
-                }
-                for (int i = 0; i < count; i++) {
-                    dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC);
-                }
+            if (dates == null || count > dates.length) {
+                // Happens for the document. We delay allocating dates so we can allocate it with a reasonable size.
+                dates = new JodaCompatibleZonedDateTime[count];
+            }
+            for (int i = 0; i < count; ++i) {
+                dates[i] = new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(in.nextValue()), ZoneOffset.UTC);
             }
         }
-
-        /**
-         * Log a deprecation log, with the server's permissions, not the permissions of the
-         * script calling this method. We need to do this to prevent errors when rolling
-         * the log file.
-         */
-        private void deprecated(String message) {
-            // Intentionally not calling SpecialPermission.check because this is supposed to be called by scripts
-            AccessController.doPrivileged(new PrivilegedAction<Void>() {
-                @Override
-                public Void run() {
-                    deprecationCallback.accept(message);
-                    return null;
-                }
-            });
-        }
     }
 }
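
For illustration only, a sketch of the simplified contract: Dates now always wraps epoch millis in JodaCompatibleZonedDateTime, with no es.scripting.use_java_time branch and no deprecation callback. The doc-values source here is assumed, not shown:

    import java.io.IOException;
    import org.apache.lucene.index.SortedNumericDocValues;
    import org.elasticsearch.index.fielddata.ScriptDocValues;
    import org.elasticsearch.script.JodaCompatibleZonedDateTime;

    class DatesSketch {
        static long firstValueMillis(SortedNumericDocValues docValues) throws IOException {
            ScriptDocValues.Dates dates = new ScriptDocValues.Dates(docValues);
            dates.setNextDocId(0);                                 // position on document 0
            JodaCompatibleZonedDateTime first = dates.getValue();  // throws if the doc has no value
            return first.toInstant().toEpochMilli();               // java-time style, no warning
        }
    }
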
@@ -25,6 +25,11 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
 import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
+import org.elasticsearch.script.JodaCompatibleZonedDateTime;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+
+import java.io.IOException;
 
 /**
  * Specialization of {@link AtomicNumericFieldData} for integers.

@@ -47,6 +52,34 @@ abstract class AtomicLongFieldData implements AtomicNumericFieldData {
         return ramBytesUsed;
     }
 
+    @Override
+    public final ScriptDocValues<?> getLegacyFieldValues() {
+        switch (numericType) {
+            case DATE:
+                final ScriptDocValues.Dates realDV = new ScriptDocValues.Dates(getLongValues());
+                return new ScriptDocValues<DateTime>() {
+
+                    @Override
+                    public int size() {
+                        return realDV.size();
+                    }
+
+                    @Override
+                    public DateTime get(int index) {
+                        JodaCompatibleZonedDateTime dt = realDV.get(index);
+                        return new DateTime(dt.toInstant().toEpochMilli(), DateTimeZone.UTC);
+                    }
+
+                    @Override
+                    public void setNextDocId(int docId) throws IOException {
+                        realDV.setNextDocId(docId);
+                    }
+                };
+            default:
+                return getScriptValues();
+        }
+    }
+
     @Override
     public final ScriptDocValues<?> getScriptValues() {
         switch (numericType) {
@@ -0,0 +1,414 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script;
+
+import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.time.DayOfWeek;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.Month;
+import java.time.OffsetDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAdjuster;
+import java.time.temporal.TemporalAmount;
+import java.time.temporal.TemporalField;
+import java.time.temporal.TemporalUnit;
+import java.time.temporal.WeekFields;
+import java.util.Locale;
+
+/**
+ * A wrapper around ZonedDateTime that exposes joda methods for backcompat.
+ */
+public class JodaCompatibleZonedDateTime {
+    private static final DeprecationLogger DEPRECATION_LOGGER =
+        new DeprecationLogger(ESLoggerFactory.getLogger(JodaCompatibleZonedDateTime.class));
+
+    private static void logDeprecated(String key, String message, Object... params) {
+        // NOTE: we don't check SpecialPermission because this will be called (indirectly) from scripts
+        AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
+            DEPRECATION_LOGGER.deprecatedAndMaybeLog(key, message, params);
+            return null;
+        });
+    }
+
+    private static void logDeprecatedMethod(String oldMethod, String newMethod) {
+        logDeprecated(oldMethod, "Use of the joda time method [{}] is deprecated. Use [{}] instead.", oldMethod, newMethod);
+    }
+
+    private ZonedDateTime dt;
+
+    public JodaCompatibleZonedDateTime(Instant instant, ZoneId zone) {
+        this.dt = ZonedDateTime.ofInstant(instant, zone);
+    }
+
+    // access the underlying ZonedDateTime
+    public ZonedDateTime getZonedDateTime() {
+        return dt;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        return dt.equals(o);
+    }
+
+    @Override
+    public int hashCode() {
+        return dt.hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return dt.toString();
+    }
+
+    public boolean isAfter(ZonedDateTime o) {
+        return dt.isAfter(o);
+    }
+
+    public boolean isBefore(ZonedDateTime o) {
+        return dt.isBefore(o);
+    }
+
+    public boolean isEqual(ZonedDateTime o) {
+        return dt.isEqual(o);
+    }
+
+    public int getDayOfMonth() {
+        return dt.getDayOfMonth();
+    }
+
+    public int getDayOfYear() {
+        return dt.getDayOfYear();
+    }
+
+    public int getHour() {
+        return dt.getHour();
+    }
+
+    public LocalDate toLocalDate() {
+        return dt.toLocalDate();
+    }
+
+    public LocalDateTime toLocalDateTime() {
+        return dt.toLocalDateTime();
+    }
+
+    public int getMinute() {
+        return dt.getMinute();
+    }
+
+    public Month getMonth() {
+        return dt.getMonth();
+    }
+
+    public int getMonthValue() {
+        return dt.getMonthValue();
+    }
+
+    public int getNano() {
+        return dt.getNano();
+    }
+
+    public int getSecond() {
+        return dt.getSecond();
+    }
+
+    public int getYear() {
+        return dt.getYear();
+    }
+
+    public ZonedDateTime minus(TemporalAmount delta) {
+        return dt.minus(delta);
+    }
+
+    public ZonedDateTime minus(long amount, TemporalUnit unit) {
+        return dt.minus(amount, unit);
+    }
+
+    public ZonedDateTime minusYears(long amount) {
+        return dt.minusYears(amount);
+    }
+
+    public ZonedDateTime minusMonths(long amount) {
+        return dt.minusMonths(amount);
+    }
+
+    public ZonedDateTime minusWeeks(long amount) {
+        return dt.minusWeeks(amount);
+    }
+
+    public ZonedDateTime minusDays(long amount) {
+        return dt.minusDays(amount);
+    }
+
+    public ZonedDateTime minusHours(long amount) {
+        return dt.minusHours(amount);
+    }
+
+    public ZonedDateTime minusMinutes(long amount) {
+        return dt.minusMinutes(amount);
+    }
+
+    public ZonedDateTime minusSeconds(long amount) {
+        return dt.minusSeconds(amount);
+    }
+
+    public ZonedDateTime minusNanos(long amount) {
+        return dt.minusNanos(amount);
+    }
+
+    public ZonedDateTime plus(TemporalAmount amount) {
+        return dt.plus(amount);
+    }
+
+    public ZonedDateTime plus(long amount, TemporalUnit unit) {
+        return dt.plus(amount, unit);
+    }
+
+    public ZonedDateTime plusDays(long amount) {
+        return dt.plusDays(amount);
+    }
+
+    public ZonedDateTime plusHours(long amount) {
+        return dt.plusHours(amount);
+    }
+
+    public ZonedDateTime plusMinutes(long amount) {
+        return dt.plusMinutes(amount);
+    }
+
+    public ZonedDateTime plusMonths(long amount) {
+        return dt.plusMonths(amount);
+    }
+
+    public ZonedDateTime plusNanos(long amount) {
+        return dt.plusNanos(amount);
+    }
+
+    public ZonedDateTime plusSeconds(long amount) {
+        return dt.plusSeconds(amount);
+    }
+
+    public ZonedDateTime plusWeeks(long amount) {
+        return dt.plusWeeks(amount);
+    }
+
+    public ZonedDateTime plusYears(long amount) {
+        return dt.plusYears(amount);
+    }
+
+    public Instant toInstant() {
+        return dt.toInstant();
+    }
+
+    public OffsetDateTime toOffsetDateTime() {
+        return dt.toOffsetDateTime();
+    }
+
+    @SuppressForbidden(reason = "only exposing the method as a passthrough")
+    public ZonedDateTime truncatedTo(TemporalUnit unit) {
+        return dt.truncatedTo(unit);
+    }
+
+    public ZonedDateTime with(TemporalAdjuster adjuster) {
+        return dt.with(adjuster);
+    }
+
+    public ZonedDateTime with(TemporalField field, long newValue) {
+        return dt.with(field, newValue);
+    }
+
+    public ZonedDateTime withDayOfMonth(int value) {
+        return dt.withDayOfMonth(value);
+    }
+
+    public ZonedDateTime withDayOfYear(int value) {
+        return dt.withDayOfYear(value);
+    }
+
+    public ZonedDateTime withEarlierOffsetAtOverlap() {
+        return dt.withEarlierOffsetAtOverlap();
+    }
+
+    public ZonedDateTime withFixedOffsetZone() {
+        return dt.withFixedOffsetZone();
+    }
+
+    public ZonedDateTime withHour(int value) {
+        return dt.withHour(value);
+    }
+
+    public ZonedDateTime withLaterOffsetAtOverlap() {
+        return dt.withLaterOffsetAtOverlap();
+    }
+
+    public ZonedDateTime withMinute(int value) {
+        return dt.withMinute(value);
+    }
+
+    public ZonedDateTime withMonth(int value) {
+        return dt.withMonth(value);
+    }
+
+    public ZonedDateTime withNano(int value) {
+        return dt.withNano(value);
+    }
+
+    public ZonedDateTime withSecond(int value) {
+        return dt.withSecond(value);
+    }
+
+    public ZonedDateTime withYear(int value) {
+        return dt.withYear(value);
+    }
+
+    public ZonedDateTime withZoneSameLocal(ZoneId zone) {
+        return dt.withZoneSameLocal(zone);
+    }
+
+    public ZonedDateTime withZoneSameInstant(ZoneId zone) {
+        return dt.withZoneSameInstant(zone);
+    }
+
+    @Deprecated
+    public long getMillis() {
+        logDeprecatedMethod("getMillis()", "toInstant().toEpochMilli()");
+        return dt.toInstant().toEpochMilli();
+    }
+
+    @Deprecated
+    public int getCenturyOfEra() {
+        logDeprecatedMethod("getCenturyOfEra()", "get(ChronoField.YEAR_OF_ERA) / 100");
+        return dt.get(ChronoField.YEAR_OF_ERA) / 100;
+    }
+
+    @Deprecated
+    public int getEra() {
+        logDeprecatedMethod("getEra()", "get(ChronoField.ERA)");
+        return dt.get(ChronoField.ERA);
+    }
+
+    @Deprecated
+    public int getHourOfDay() {
+        logDeprecatedMethod("getHourOfDay()", "getHour()");
+        return dt.getHour();
+    }
+
+    @Deprecated
+    public int getMillisOfDay() {
+        logDeprecatedMethod("getMillisOfDay()", "get(ChronoField.MILLI_OF_DAY)");
+        return dt.get(ChronoField.MILLI_OF_DAY);
+    }
+
+    @Deprecated
+    public int getMillisOfSecond() {
+        logDeprecatedMethod("getMillisOfSecond()", "get(ChronoField.MILLI_OF_SECOND)");
+        return dt.get(ChronoField.MILLI_OF_SECOND);
+    }
+
+    @Deprecated
+    public int getMinuteOfDay() {
+        logDeprecatedMethod("getMinuteOfDay()", "get(ChronoField.MINUTE_OF_DAY)");
+        return dt.get(ChronoField.MINUTE_OF_DAY);
+    }
+
+    @Deprecated
+    public int getMinuteOfHour() {
+        logDeprecatedMethod("getMinuteOfHour()", "getMinute()");
+        return dt.getMinute();
+    }
+
+    @Deprecated
+    public int getMonthOfYear() {
+        logDeprecatedMethod("getMonthOfYear()", "getMonthValue()");
+        return dt.getMonthValue();
+    }
+
+    @Deprecated
+    public int getSecondOfDay() {
+        logDeprecatedMethod("getSecondOfDay()", "get(ChronoField.SECOND_OF_DAY)");
+        return dt.get(ChronoField.SECOND_OF_DAY);
+    }
+
+    @Deprecated
+    public int getSecondOfMinute() {
+        logDeprecatedMethod("getSecondOfMinute()", "getSecond()");
+        return dt.getSecond();
+    }
+
+    @Deprecated
+    public int getWeekOfWeekyear() {
+        logDeprecatedMethod("getWeekOfWeekyear()", "get(WeekFields.ISO.weekOfWeekBasedYear())");
+        return dt.get(WeekFields.ISO.weekOfWeekBasedYear());
+    }
+
+    @Deprecated
+    public int getWeekyear() {
+        logDeprecatedMethod("getWeekyear()", "get(WeekFields.ISO.weekBasedYear())");
+        return dt.get(WeekFields.ISO.weekBasedYear());
+    }
+
+    @Deprecated
+    public int getYearOfCentury() {
+        logDeprecatedMethod("getYearOfCentury()", "get(ChronoField.YEAR_OF_ERA) % 100");
+        return dt.get(ChronoField.YEAR_OF_ERA) % 100;
+    }
+
+    @Deprecated
+    public int getYearOfEra() {
+        logDeprecatedMethod("getYearOfEra()", "get(ChronoField.YEAR_OF_ERA)");
+        return dt.get(ChronoField.YEAR_OF_ERA);
+    }
+
+    @Deprecated
+    public String toString(String format) {
+        logDeprecatedMethod("toString(String)", "a DateTimeFormatter");
+        // TODO: replace with bwc formatter
+        return new DateTime(dt.toInstant().toEpochMilli(), DateTimeZone.forID(dt.getZone().getId())).toString(format);
+    }
+
+    @Deprecated
+    public String toString(String format, Locale locale) {
+        logDeprecatedMethod("toString(String,Locale)", "a DateTimeFormatter");
+        // TODO: replace with bwc formatter
+        return new DateTime(dt.toInstant().toEpochMilli(), DateTimeZone.forID(dt.getZone().getId())).toString(format, locale);
+    }
+
+    public DayOfWeek getDayOfWeekEnum() {
+        return dt.getDayOfWeek();
+    }
+
+    @Deprecated
+    public int getDayOfWeek() {
+        logDeprecated("getDayOfWeek()",
+            "The return type of [getDayOfWeek()] will change to an enum in 7.0. Use getDayOfWeekEnum().getValue().");
+        return dt.getDayOfWeek().getValue();
+    }
+}
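
For illustration only, the shim from a caller's point of view (a standalone sketch assuming the server jar; class and variable names invented):

    import java.time.Instant;
    import java.time.ZoneOffset;
    import org.elasticsearch.script.JodaCompatibleZonedDateTime;

    public class ShimSketch {
        public static void main(String[] args) {
            JodaCompatibleZonedDateTime dt =
                new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(0L), ZoneOffset.UTC);
            int hour = dt.getHour();        // java-time accessor, silent
            long millis = dt.getMillis();   // joda accessor, logs a deprecation warning keyed by method name
            System.out.println(hour + " " + millis);
        }
    }
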
@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.support.values;
 import org.apache.lucene.search.Scorable;
 import org.elasticsearch.common.lucene.ScorerAware;
 import org.elasticsearch.index.fielddata.SortingNumericDoubleValues;
+import org.elasticsearch.script.JodaCompatibleZonedDateTime;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.joda.time.ReadableInstant;

@@ -95,6 +96,8 @@ public class ScriptDoubleValues extends SortingNumericDoubleValues implements Sc
             return ((ReadableInstant) o).getMillis();
         } else if (o instanceof ZonedDateTime) {
             return ((ZonedDateTime) o).toInstant().toEpochMilli();
+        } else if (o instanceof JodaCompatibleZonedDateTime) {
+            return ((JodaCompatibleZonedDateTime) o).toInstant().toEpochMilli();
         } else if (o instanceof Boolean) {
             // We do expose boolean fields as boolean in scripts, however aggregations still expect
             // that scripts return the same internal representation as regular fields, so boolean
@@ -22,6 +22,7 @@ import org.apache.lucene.search.Scorable;
 import org.apache.lucene.util.LongValues;
 import org.elasticsearch.common.lucene.ScorerAware;
 import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues;
+import org.elasticsearch.script.JodaCompatibleZonedDateTime;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.joda.time.ReadableInstant;

@@ -94,6 +95,8 @@ public class ScriptLongValues extends AbstractSortingNumericDocValues implements
             return ((ReadableInstant) o).getMillis();
         } else if (o instanceof ZonedDateTime) {
             return ((ZonedDateTime) o).toInstant().toEpochMilli();
+        } else if (o instanceof JodaCompatibleZonedDateTime) {
+            return ((JodaCompatibleZonedDateTime) o).toInstant().toEpochMilli();
         } else if (o instanceof Boolean) {
             // We do expose boolean fields as boolean in scripts, however aggregations still expect
             // that scripts return the same internal representation as regular fields, so boolean
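
Both aggregator hunks above add the same branch; restated standalone for clarity (an illustrative helper, not shared code in this commit):

    import java.time.ZonedDateTime;
    import org.elasticsearch.script.JodaCompatibleZonedDateTime;
    import org.joda.time.ReadableInstant;

    class EpochMillis {
        // Mirrors the instanceof chain above: every date-like script return
        // value is reduced to epoch millis before the aggregation consumes it.
        static long of(Object o) {
            if (o instanceof ReadableInstant) {
                return ((ReadableInstant) o).getMillis();
            } else if (o instanceof ZonedDateTime) {
                return ((ZonedDateTime) o).toInstant().toEpochMilli();
            } else if (o instanceof JodaCompatibleZonedDateTime) {
                return ((JodaCompatibleZonedDateTime) o).toInstant().toEpochMilli();
            }
            throw new IllegalArgumentException("unsupported date-like value: " + o);
        }
    }
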
@@ -115,7 +115,7 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
                 subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
                 data = indexFieldData.load(subReaderContext);
                 if (format == null) {
-                    scriptValues = data.getScriptValues();
+                    scriptValues = data.getLegacyFieldValues();
                 } else if (indexFieldData instanceof IndexNumericFieldData) {
                     if (((IndexNumericFieldData) indexFieldData).getNumericType().isFloatingPoint()) {
                         doubleValues = ((AtomicNumericFieldData) data).getDoubleValues();
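
For illustration only: the format == null branch above corresponds to a docvalue_fields request without an explicit format, which now renders hits through the legacy (joda-shaped) values. A request sketch with an invented field name:

    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.search.builder.SearchSourceBuilder;

    class LegacyFetchSketch {
        static SearchSourceBuilder source() {
            return new SearchSourceBuilder()
                .query(QueryBuilders.matchAllQuery())
                .docValueField("date_field");   // no format -> getLegacyFieldValues() path
        }
    }
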
@@ -70,7 +70,7 @@ public class DiffableStringMapTests extends ESTestCase {
         m.put("2", "2");
         m.put("3", "3");
         DiffableStringMap dsm = new DiffableStringMap(m);
-        DiffableStringMap expected = new DiffableStringMap(m);
+        Map<String, String> expected = new HashMap<>(m);
 
         for (int i = 0; i < randomIntBetween(5, 50); i++) {
             if (randomBoolean() && expected.size() > 1) {

@@ -80,7 +80,7 @@ public class DiffableStringMapTests extends ESTestCase {
             } else {
                 expected.put(randomAlphaOfLength(2), randomAlphaOfLength(4));
             }
-            dsm = expected.diff(dsm).apply(dsm);
+            dsm = new DiffableStringMap(expected).diff(dsm).apply(dsm);
         }
         assertThat(expected, equalTo(dsm));
     }
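
For illustration only, the invariant the reworked test exercises: diffs are taken from a plain-map snapshot and re-applied to the previous immutable state. The sketch assumes DiffableStringMap is accessible from the caller's package:

    import java.util.HashMap;
    import java.util.Map;

    class DiffRoundTrip {
        static void roundTrip() {
            Map<String, String> next = new HashMap<>();
            next.put("k", "v");
            DiffableStringMap previous = new DiffableStringMap(next);
            next.put("k2", "v2");   // mutate the plain map, not the immutable DiffableStringMap
            DiffableStringMap current = new DiffableStringMap(next).diff(previous).apply(previous);
            assert current.equals(next);
        }
    }
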
@@ -21,6 +21,7 @@ package org.elasticsearch.cluster.routing.allocation.decider;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterInfo;
+import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.DiskUsage;
 import org.elasticsearch.cluster.ESAllocationTestCase;

@@ -79,7 +80,8 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase {
             .addAsNew(metaData.index("test"))
             .build();
 
-        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).build();
+        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
+            .metaData(metaData).routingTable(routingTable).build();
 
         clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
             .add(node_0)

@@ -110,6 +112,61 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase {
             "disk space than the maximum allowed [90.0%]"));
     }
 
+    public void testCannotAllocateDueToLackOfDiskResources() {
+        ClusterSettings nss = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss);
+
+        MetaData metaData = MetaData.builder()
+            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
+            .build();
+
+        final Index index = metaData.index("test").getIndex();
+
+        ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(index, 0), true, EmptyStoreRecoverySource.INSTANCE,
+            new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+        DiscoveryNode node_0 = new DiscoveryNode("node_0", buildNewFakeTransportAddress(), Collections.emptyMap(),
+            new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT);
+        DiscoveryNode node_1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Collections.emptyMap(),
+            new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT);
+
+        RoutingTable routingTable = RoutingTable.builder()
+            .addAsNew(metaData.index("test"))
+            .build();
+
+        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
+            .metaData(metaData).routingTable(routingTable).build();
+
+        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
+            .add(node_0)
+            .add(node_1)
+        ).build();
+
+        // actual test -- after all that bloat :)
+
+        ImmutableOpenMap.Builder<String, DiskUsage> leastAvailableUsages = ImmutableOpenMap.builder();
+        leastAvailableUsages.put("node_0", new DiskUsage("node_0", "node_0", "_na_", 100, 0)); // all full
+        ImmutableOpenMap.Builder<String, DiskUsage> mostAvailableUsage = ImmutableOpenMap.builder();
+        final int freeBytes = randomIntBetween(20, 100);
+        mostAvailableUsage.put("node_0", new DiskUsage("node_0", "node_0", "_na_", 100, freeBytes));
+
+        ImmutableOpenMap.Builder<String, Long> shardSizes = ImmutableOpenMap.builder();
+        // way bigger than available space
+        final long shardSize = randomIntBetween(110, 1000);
+        shardSizes.put("[test][0][p]", shardSize);
+        ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(),
+            shardSizes.build(), ImmutableOpenMap.of());
+        RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.singleton(decider)),
+            clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime());
+        allocation.debugDecision(true);
+        Decision decision = decider.canAllocate(test_0, new RoutingNode("node_0", node_0), allocation);
+        assertEquals(Decision.Type.NO, decision.type());
+
+        assertThat(decision.getExplanation(), containsString(
+            "allocating the shard to this node will bring the node above the high watermark cluster setting "
+            + "[cluster.routing.allocation.disk.watermark.high=90%] "
+            + "and cause it to have less than the minimum required [0b] of free space "
+            + "(free: [" + freeBytes + "b], estimated shard size: [" + shardSize + "b])"));
+    }
+
     public void testCanRemainUsesLeastAvailableSpace() {
         ClusterSettings nss = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
         DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss);
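
The arithmetic the new test pins down, restated standalone (illustrative only, not the decider's actual code): total disk is 100 bytes, free is in [20, 100], and an estimated shard size in [110, 1000] cannot fit, so projected free space goes negative and the decision is NO.

    class WatermarkSketch {
        static boolean allowAllocation(long totalBytes, long freeBytes, long shardBytes, double highWatermarkPercent) {
            long freeAfter = freeBytes - shardBytes;                                  // may go negative
            double usedPercentAfter = 100.0 * (totalBytes - freeAfter) / totalBytes;  // projected usage
            return freeAfter >= 0 && usedPercentAfter <= highWatermarkPercent;
        }

        public static void main(String[] args) {
            System.out.println(allowAllocation(100, 50, 110, 90.0));   // false -> Decision.NO
        }
    }
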
@@ -66,6 +66,7 @@ public class DateTimeUnitTests extends ESTestCase {
         assertEquals(SECOND_OF_MINUTE, DateTimeUnit.resolve((byte) 8));
     }
 
+    @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33749")
     public void testConversion() {
         long millis = randomLongBetween(0, Instant.now().toEpochMilli());
         DateTimeZone zone = randomDateTimeZone();
@@ -81,6 +81,16 @@ public class ByteSizeUnitTests extends ESTestCase {
         assertThat(PB.toPB(1), equalTo(1L));
     }
 
+    public void testToString() {
+        int v = randomIntBetween(1, 1023);
+        assertThat(new ByteSizeValue(PB.toBytes(v)).toString(), equalTo(v + "pb"));
+        assertThat(new ByteSizeValue(TB.toBytes(v)).toString(), equalTo(v + "tb"));
+        assertThat(new ByteSizeValue(GB.toBytes(v)).toString(), equalTo(v + "gb"));
+        assertThat(new ByteSizeValue(MB.toBytes(v)).toString(), equalTo(v + "mb"));
+        assertThat(new ByteSizeValue(KB.toBytes(v)).toString(), equalTo(v + "kb"));
+        assertThat(new ByteSizeValue(BYTES.toBytes(v)).toString(), equalTo(v + "b"));
+    }
+
     public void testSerialization() throws IOException {
         for (ByteSizeUnit unit : ByteSizeUnit.values()) {
             try (BytesStreamOutput out = new BytesStreamOutput()) {
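
For illustration only, the rendering contract testToString() pins down: up to 1023 whole units of a size print with that unit's suffix.

    import org.elasticsearch.common.unit.ByteSizeUnit;
    import org.elasticsearch.common.unit.ByteSizeValue;

    class ByteSizeSketch {
        public static void main(String[] args) {
            System.out.println(new ByteSizeValue(ByteSizeUnit.GB.toBytes(5)));   // 5gb
            System.out.println(new ByteSizeValue(ByteSizeUnit.KB.toBytes(3)));   // 3kb
        }
    }
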
@@ -355,6 +355,7 @@ public class BigArraysTests extends ESTestCase {
         HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService(
                 Settings.builder()
                     .put(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES)
+                    .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), false)
                     .build(),
                 new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
         BigArrays bigArrays = new BigArrays(null, hcbs, false).withCircuitBreaking();

@@ -412,6 +413,7 @@ public class BigArraysTests extends ESTestCase {
         HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService(
                 Settings.builder()
                     .put(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES)
+                    .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), false)
                     .build(),
                 new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
         BigArrays bigArrays = new BigArrays(null, hcbs, false);
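
For illustration only: the added setting opts these tests out of the real-memory parent breaker, so breaker assertions see only the bytes explicitly reserved rather than actual JVM heap usage. A settings sketch:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;

    class BreakerSettingsSketch {
        static Settings deterministicBreakerSettings() {
            return Settings.builder()
                .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), false)
                .build();
        }
    }
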
@@ -1,133 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.fielddata;
-
-import org.elasticsearch.index.fielddata.ScriptDocValues.Dates;
-import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-
-import java.io.IOException;
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.security.PermissionCollection;
-import java.security.Permissions;
-import java.security.PrivilegedAction;
-import java.security.ProtectionDomain;
-import java.time.Instant;
-import java.time.ZoneOffset;
-import java.time.ZonedDateTime;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.function.Consumer;
-import java.util.function.Function;
-
-import static org.hamcrest.Matchers.containsInAnyOrder;
-
-public class ScriptDocValuesDatesTests extends ESTestCase {
-
-    public void testJavaTime() throws IOException {
-        assertDateDocValues(true);
-    }
-
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32779")
-    public void testJodaTimeBwc() throws IOException {
-        assertDateDocValues(false, "The joda time api for doc values is deprecated." +
-            " Use -Des.scripting.use_java_time=true to use the java time api for date field doc values");
-    }
-
-    public void assertDateDocValues(boolean useJavaTime, String... expectedWarnings) throws IOException {
-        final Function<Long, Object> datetimeCtor;
-        if (useJavaTime) {
-            datetimeCtor = millis -> ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC);
-        } else {
-            datetimeCtor = millis -> new DateTime(millis, DateTimeZone.UTC);
-        }
-        long[][] values = new long[between(3, 10)][];
-        Object[][] expectedDates = new Object[values.length][];
-        for (int d = 0; d < values.length; d++) {
-            values[d] = new long[randomBoolean() ? randomBoolean() ? 0 : 1 : between(2, 100)];
-            expectedDates[d] = new Object[values[d].length];
-            for (int i = 0; i < values[d].length; i++) {
-                values[d][i] = randomNonNegativeLong();
-                expectedDates[d][i] = datetimeCtor.apply(values[d][i]);
-            }
-        }
-
-        Set<String> warnings = new HashSet<>();
-        Dates dates = wrap(values, deprecationMessage -> {
-            warnings.add(deprecationMessage);
-            /* Create a temporary directory to prove we are running with the
-             * server's permissions. */
-            createTempDir();
-        }, useJavaTime);
-        // each call to get or getValue will be run with limited permissions, just as they are in scripts
-        PermissionCollection noPermissions = new Permissions();
-        AccessControlContext noPermissionsAcc = new AccessControlContext(
-            new ProtectionDomain[] {
-                new ProtectionDomain(null, noPermissions)
-            }
-        );
-
-        for (int round = 0; round < 10; round++) {
-            int d = between(0, values.length - 1);
-            dates.setNextDocId(d);
-            if (expectedDates[d].length > 0) {
-                Object dateValue = AccessController.doPrivileged((PrivilegedAction<Object>) dates::getValue, noPermissionsAcc);
-                assertEquals(expectedDates[d][0], dateValue);
-            } else {
-                Exception e = expectThrows(IllegalStateException.class, () -> dates.getValue());
-                assertEquals("A document doesn't have a value for a field! " +
-                    "Use doc[<field>].size()==0 to check if a document is missing a field!", e.getMessage());
-            }
-
-            assertEquals(values[d].length, dates.size());
-            for (int i = 0; i < values[d].length; i++) {
-                final int ndx = i;
-                Object dateValue = AccessController.doPrivileged((PrivilegedAction<Object>) () -> dates.get(ndx), noPermissionsAcc);
-                assertEquals(expectedDates[d][i], dateValue);
-            }
-        }
-
-        assertThat(warnings, containsInAnyOrder(expectedWarnings));
-    }
-
-    private Dates wrap(long[][] values, Consumer<String> deprecationHandler, boolean useJavaTime) {
-        return new Dates(new AbstractSortedNumericDocValues() {
-            long[] current;
-            int i;
-
-            @Override
-            public boolean advanceExact(int doc) {
-                current = values[doc];
-                i = 0;
-                return current.length > 0;
-            }
-            @Override
-            public int docValueCount() {
-                return current.length;
-            }
-            @Override
-            public long nextValue() {
-                return current[i++];
-            }
-        }, deprecationHandler, useJavaTime);
-    }
-}
@@ -0,0 +1,240 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.test.ESTestCase;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.junit.Before;
+
+import java.security.AccessControlContext;
+import java.security.AccessController;
+import java.security.PermissionCollection;
+import java.security.Permissions;
+import java.security.PrivilegedAction;
+import java.security.ProtectionDomain;
+import java.time.DayOfWeek;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.Month;
+import java.time.ZoneOffset;
+import java.util.Locale;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class JodaCompatibleZonedDateTimeTests extends ESTestCase {
+    private static final Logger DEPRECATION_LOGGER =
+        LogManager.getLogger("org.elasticsearch.deprecation.script.JodaCompatibleZonedDateTime");
+
+    // each call to get or getValue will be run with limited permissions, just as they are in scripts
+    private static PermissionCollection NO_PERMISSIONS = new Permissions();
+    private static AccessControlContext NO_PERMISSIONS_ACC = new AccessControlContext(
+        new ProtectionDomain[] {
+            new ProtectionDomain(null, NO_PERMISSIONS)
+        }
+    );
+
+    private JodaCompatibleZonedDateTime javaTime;
+    private DateTime jodaTime;
+
+    @Before
+    public void setupTime() {
+        long millis = randomIntBetween(0, Integer.MAX_VALUE);
+        javaTime = new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(millis), ZoneOffset.ofHours(-7));
+        jodaTime = new DateTime(millis, DateTimeZone.forOffsetHours(-7));
+    }
+
+    void assertDeprecation(Runnable assertions, String message) {
+        Appender appender = new AbstractAppender("test", null, null) {
+            @Override
+            public void append(LogEvent event) {
+                /* Create a temporary directory to prove we are running with the
+                 * server's permissions. */
+                createTempDir();
+            }
+        };
+        appender.start();
+        Loggers.addAppender(DEPRECATION_LOGGER, appender);
+        try {
+            // the assertions are run with the same reduced privileges scripts run with
+            AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
+                assertions.run();
+                return null;
+            }, NO_PERMISSIONS_ACC);
+        } finally {
+            appender.stop();
+            Loggers.removeAppender(DEPRECATION_LOGGER, appender);
+        }
+
+        assertWarnings(message);
+    }
+
+    void assertMethodDeprecation(Runnable assertions, String oldMethod, String newMethod) {
+        assertDeprecation(assertions, "Use of the joda time method [" + oldMethod + "] is deprecated. Use [" + newMethod + "] instead.");
+    }
+
+    public void testDayOfMonth() {
+        assertThat(javaTime.getDayOfMonth(), equalTo(jodaTime.getDayOfMonth()));
+    }
+
+    public void testDayOfYear() {
+        assertThat(javaTime.getDayOfYear(), equalTo(jodaTime.getDayOfYear()));
+    }
+
+    public void testHour() {
+        assertThat(javaTime.getHour(), equalTo(jodaTime.getHourOfDay()));
+    }
+
+    public void testLocalDate() {
+        assertThat(javaTime.toLocalDate(), equalTo(LocalDate.of(jodaTime.getYear(), jodaTime.getMonthOfYear(), jodaTime.getDayOfMonth())));
+    }
+
+    public void testLocalDateTime() {
+        LocalDateTime dt = LocalDateTime.of(jodaTime.getYear(), jodaTime.getMonthOfYear(), jodaTime.getDayOfMonth(),
+            jodaTime.getHourOfDay(), jodaTime.getMinuteOfHour(), jodaTime.getSecondOfMinute(),
+            jodaTime.getMillisOfSecond() * 1000000);
+        assertThat(javaTime.toLocalDateTime(), equalTo(dt));
+    }
+
+    public void testMinute() {
+        assertThat(javaTime.getMinute(), equalTo(jodaTime.getMinuteOfHour()));
+    }
+
+    public void testMonth() {
+        assertThat(javaTime.getMonth(), equalTo(Month.of(jodaTime.getMonthOfYear())));
+    }
+
+    public void testMonthValue() {
+        assertThat(javaTime.getMonthValue(), equalTo(jodaTime.getMonthOfYear()));
+    }
+
+    public void testNano() {
+        assertThat(javaTime.getNano(), equalTo(jodaTime.getMillisOfSecond() * 1000000));
+    }
+
+    public void testSecond() {
+        assertThat(javaTime.getSecond(), equalTo(jodaTime.getSecondOfMinute()));
+    }
+
+    public void testYear() {
+        assertThat(javaTime.getYear(), equalTo(jodaTime.getYear()));
+    }
+
+    public void testMillis() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getMillis(), equalTo(jodaTime.getMillis())),
+            "getMillis()", "toInstant().toEpochMilli()");
+    }
+
+    public void testCenturyOfEra() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getCenturyOfEra(), equalTo(jodaTime.getCenturyOfEra())),
+            "getCenturyOfEra()", "get(ChronoField.YEAR_OF_ERA) / 100");
+    }
+
+    public void testEra() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getEra(), equalTo(jodaTime.getEra())),
+            "getEra()", "get(ChronoField.ERA)");
+    }
+
+    public void testHourOfDay() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getHourOfDay(), equalTo(jodaTime.getHourOfDay())),
+            "getHourOfDay()", "getHour()");
+    }
+
+    public void testMillisOfDay() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getMillisOfDay(), equalTo(jodaTime.getMillisOfDay())),
+            "getMillisOfDay()", "get(ChronoField.MILLI_OF_DAY)");
+    }
+
+    public void testMillisOfSecond() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getMillisOfSecond(), equalTo(jodaTime.getMillisOfSecond())),
+            "getMillisOfSecond()", "get(ChronoField.MILLI_OF_SECOND)");
+    }
+
+    public void testMinuteOfDay() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getMinuteOfDay(), equalTo(jodaTime.getMinuteOfDay())),
+            "getMinuteOfDay()", "get(ChronoField.MINUTE_OF_DAY)");
+    }
+
+    public void testMinuteOfHour() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getMinuteOfHour(), equalTo(jodaTime.getMinuteOfHour())),
+            "getMinuteOfHour()", "getMinute()");
+    }
+
+    public void testMonthOfYear() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getMonthOfYear(), equalTo(jodaTime.getMonthOfYear())),
+            "getMonthOfYear()", "getMonthValue()");
+    }
+
+    public void testSecondOfDay() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getSecondOfDay(), equalTo(jodaTime.getSecondOfDay())),
+            "getSecondOfDay()", "get(ChronoField.SECOND_OF_DAY)");
+    }
+
+    public void testSecondOfMinute() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getSecondOfMinute(), equalTo(jodaTime.getSecondOfMinute())),
+            "getSecondOfMinute()", "getSecond()");
+    }
+
+    public void testWeekOfWeekyear() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getWeekOfWeekyear(), equalTo(jodaTime.getWeekOfWeekyear())),
+            "getWeekOfWeekyear()", "get(WeekFields.ISO.weekOfWeekBasedYear())");
+    }
+
+    public void testWeekyear() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getWeekyear(), equalTo(jodaTime.getWeekyear())),
+            "getWeekyear()", "get(WeekFields.ISO.weekBasedYear())");
+    }
+
+    public void testYearOfCentury() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getYearOfCentury(), equalTo(jodaTime.getYearOfCentury())),
+            "getYearOfCentury()", "get(ChronoField.YEAR_OF_ERA) % 100");
+    }
+
+    public void testYearOfEra() {
+        assertMethodDeprecation(() -> assertThat(javaTime.getYearOfEra(), equalTo(jodaTime.getYearOfEra())),
+            "getYearOfEra()", "get(ChronoField.YEAR_OF_ERA)");
+    }
+
+    public void testToString1() {
+        assertMethodDeprecation(() -> assertThat(javaTime.toString("YYYY/MM/dd HH:mm:ss.SSS"),
+            equalTo(jodaTime.toString("YYYY/MM/dd HH:mm:ss.SSS"))), "toString(String)", "a DateTimeFormatter");
+    }
+
+    public void testToString2() {
+        assertMethodDeprecation(() -> assertThat(javaTime.toString("EEE", Locale.GERMANY),
+            equalTo(jodaTime.toString("EEE", Locale.GERMANY))), "toString(String,Locale)", "a DateTimeFormatter");
+    }
+
+    public void testDayOfWeek() {
+        assertDeprecation(() -> assertThat(javaTime.getDayOfWeek(), equalTo(jodaTime.getDayOfWeek())),
+            "The return type of [getDayOfWeek()] will change to an enum in 7.0. Use getDayOfWeekEnum().getValue().");
+    }
+
+    public void testDayOfWeekEnum() {
+        assertThat(javaTime.getDayOfWeekEnum(), equalTo(DayOfWeek.of(jodaTime.getDayOfWeek())));
+    }
+}
@@ -47,10 +47,13 @@ import org.elasticsearch.search.lookup.FieldLookup;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.joda.time.ReadableDateTime;
+import org.joda.time.format.DateTimeFormat;
+
 import java.time.ZoneOffset;
 import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Base64;
@@ -59,7 +62,6 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
@@ -111,7 +113,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
         scripts.put("doc['date'].date.millis", vars -> {
             Map<?, ?> doc = (Map) vars.get("doc");
             ScriptDocValues.Dates dates = (ScriptDocValues.Dates) doc.get("date");
-            return ((ZonedDateTime) dates.getValue()).toInstant().toEpochMilli();
+            return dates.getValue().toInstant().toEpochMilli();
         });

         scripts.put("_fields['num1'].value", vars -> fieldsScript(vars, "num1"));
@@ -801,8 +803,8 @@ public class SearchFieldsIT extends ESIntegTestCase {
         assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
-        ZonedDateTime dateField = searchResponse.getHits().getAt(0).getFields().get("date_field").getValue();
-        assertThat(dateField.toInstant().toEpochMilli(), equalTo(date.toInstant().toEpochMilli()));
+        DateTime dateField = searchResponse.getHits().getAt(0).getFields().get("date_field").getValue();
+        assertThat(dateField.getMillis(), equalTo(date.toInstant().toEpochMilli()));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
@@ -828,7 +830,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
         assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
         dateField = searchResponse.getHits().getAt(0).getFields().get("date_field").getValue();
-        assertThat(dateField.toInstant().toEpochMilli(), equalTo(date.toInstant().toEpochMilli()));
+        assertThat(dateField.getMillis(), equalTo(date.toInstant().toEpochMilli()));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
@@ -968,10 +970,10 @@ public class SearchFieldsIT extends ESIntegTestCase {
         assertAcked(prepareCreate("test").addMapping("type", mapping));
         ensureGreen("test");

-        ZonedDateTime date = ZonedDateTime.of(1990, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC);
-        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd", Locale.ROOT);
+        DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC);
+        org.joda.time.format.DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd");

-        index("test", "type", "1", "text_field", "foo", "date_field", formatter.format(date));
+        index("test", "type", "1", "text_field", "foo", "date_field", formatter.print(date));
         refresh("test");

         SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery())
@@ -999,8 +1001,8 @@ public class SearchFieldsIT extends ESIntegTestCase {
         DocumentField dateField = fields.get("date_field");
         assertThat(dateField.getName(), equalTo("date_field"));

-        ZonedDateTime fetchedDate = dateField.getValue();
-        assertThat(fetchedDate, equalTo(date));
+        ReadableDateTime fetchedDate = dateField.getValue();
+        assertThat(fetchedDate.getMillis(), equalTo(date.toInstant().getMillis()));
     }

     public void testWildcardDocValueFieldsWithFieldAlias() throws Exception {
@@ -1033,10 +1035,10 @@ public class SearchFieldsIT extends ESIntegTestCase {
         assertAcked(prepareCreate("test").addMapping("type", mapping));
         ensureGreen("test");

-        ZonedDateTime date = ZonedDateTime.of(1990, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC);
-        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd", Locale.ROOT);
+        DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC);
+        org.joda.time.format.DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd");

-        index("test", "type", "1", "text_field", "foo", "date_field", formatter.format(date));
+        index("test", "type", "1", "text_field", "foo", "date_field", formatter.print(date));
         refresh("test");

         SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery())
@@ -1063,8 +1065,8 @@ public class SearchFieldsIT extends ESIntegTestCase {
         DocumentField dateField = fields.get("date_field");
         assertThat(dateField.getName(), equalTo("date_field"));

-        ZonedDateTime fetchedDate = dateField.getValue();
-        assertThat(fetchedDate, equalTo(date));
+        ReadableDateTime fetchedDate = dateField.getValue();
+        assertThat(fetchedDate.getMillis(), equalTo(date.toInstant().getMillis()));
     }
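The SearchFieldsIT hunks above move the date doc-value assertions from java.time types back to Joda types; both representations agree on epoch millis, which is what every assertion ultimately compares. A minimal sketch, assuming Joda-Time is on the classpath:

import java.time.ZoneOffset;
import java.time.ZonedDateTime;

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class EpochMillisEquivalence {
    public static void main(String[] args) {
        DateTime joda = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC);
        ZonedDateTime javaTime = ZonedDateTime.of(1990, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC);
        // Joda's getMillis() and java.time's toInstant().toEpochMilli() yield the same value:
        System.out.println(joda.getMillis());
        System.out.println(javaTime.toInstant().toEpochMilli());
    }
}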
@@ -389,10 +389,27 @@ public class RemoteClusterConnectionTests extends ESTestCase {
             throws Exception {
         CountDownLatch latch = new CountDownLatch(1);
         AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
-        ActionListener<Void> listener = ActionListener.wrap(x -> latch.countDown(), x -> {
-            exceptionAtomicReference.set(x);
-            latch.countDown();
-        });
+        ActionListener<Void> listener = ActionListener.wrap(
+            x -> latch.countDown(),
+            x -> {
+                /*
+                 * This can occur on a thread submitted to the thread pool while we are closing the
+                 * remote cluster connection at the end of the test.
+                 */
+                if (x instanceof CancellableThreads.ExecutionCancelledException) {
+                    try {
+                        // we should already be shutting down
+                        assertEquals(0L, latch.getCount());
+                    } finally {
+                        // ensure we count down the latch on failure as well to not prevent failing tests from ending
+                        latch.countDown();
+                    }
+                    return;
+                }
+                exceptionAtomicReference.set(x);
+                latch.countDown();
+            }
+        );
         connection.updateSeedNodes(proxyAddress, seedNodes, listener);
         latch.await();
         if (exceptionAtomicReference.get() != null) {
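The rewritten listener's key point is that the latch is counted down on every path, including the cancellation that can race with closing the remote cluster connection, so even a failing test can finish. A pure-JDK sketch of that shape (the IllegalStateException here is only a stand-in for CancellableThreads.ExecutionCancelledException):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

public class LatchAlwaysReleased {
    public static void main(String[] args) throws InterruptedException {
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<Exception> failure = new AtomicReference<>();
        Exception x = new IllegalStateException("execution cancelled"); // stand-in for the cancellation case
        try {
            if (!(x instanceof IllegalStateException)) {
                failure.set(x); // a real failure: record it for the waiting thread
            }                   // otherwise benign during shutdown: record nothing
        } finally {
            latch.countDown();  // never leave the awaiting thread hanging
        }
        latch.await();
        System.out.println("recorded failure: " + failure.get());
    }
}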
@@ -64,6 +64,7 @@ thirdPartyAudit.excludes = [

 task namingConventionsMain(type: org.elasticsearch.gradle.precommit.NamingConventionsTask) {
     checkForTestsInMain = true
+    javaHome = project.runtimeJavaHome
 }
 precommit.dependsOn namingConventionsMain
@@ -49,6 +49,7 @@ dependencies {

     compileOnly project(path: xpackModule('core'), configuration: 'default')
     testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
+    testCompile project(path: xpackModule('monitoring'), configuration: 'testArtifacts')
 }

 dependencyLicenses {
@@ -1,3 +1,13 @@
+import org.elasticsearch.gradle.test.RestIntegTestTask
+
+subprojects {
+    project.tasks.withType(RestIntegTestTask) {
+        final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources')
+        project.copyRestSpec.from(xPackResources) {
+            include 'rest-api-spec/api/**'
+        }
+    }
+}
+
 /* Remove assemble on all qa projects because we don't need to publish
  * artifacts for them. */
@@ -2,7 +2,7 @@ ccruser:
   cluster:
     - manage_ccr
   indices:
-    - names: [ 'allowed-index' ]
+    - names: [ 'allowed-index', 'logs-eu-*' ]
       privileges:
         - monitor
         - read
@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.xpack.ccr;

+import org.apache.http.HttpHost;
 import org.apache.http.util.EntityUtils;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
@@ -119,6 +120,45 @@ public class FollowIndexSecurityIT extends ESRestTestCase {
         }
     }

+    public void testAutoFollowPatterns() throws Exception {
+        assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster);
+        String allowedIndex = "logs-eu-20190101";
+        String disallowedIndex = "logs-us-20190101";
+
+        Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster");
+        request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"]}");
+        assertOK(client().performRequest(request));
+
+        try (RestClient leaderClient = buildLeaderClient()) {
+            for (String index : new String[]{allowedIndex, disallowedIndex}) {
+                Settings settings = Settings.builder()
+                    .put("index.soft_deletes.enabled", true)
+                    .build();
+                String requestBody = "{\"settings\": " + Strings.toString(settings) +
+                    ", \"mappings\": {\"_doc\": {\"properties\": {\"field\": {\"type\": \"keyword\"}}}} }";
+                request = new Request("PUT", "/" + index);
+                request.setJsonEntity(requestBody);
+                assertOK(leaderClient.performRequest(request));
+
+                for (int i = 0; i < 5; i++) {
+                    String id = Integer.toString(i);
+                    index(leaderClient, index, id, "field", i, "filtered_field", "true");
+                }
+            }
+        }
+
+        assertBusy(() -> {
+            ensureYellow(allowedIndex);
+            verifyDocuments(adminClient(), allowedIndex, 5);
+        });
+        assertThat(indexExists(adminClient(), disallowedIndex), is(false));
+
+        // Cleanup by deleting auto follow pattern and unfollowing:
+        request = new Request("DELETE", "/_ccr/auto_follow/leader_cluster");
+        assertOK(client().performRequest(request));
+        unfollowIndex(allowedIndex);
+    }
+
     private int countCcrNodeTasks() throws IOException {
         final Request request = new Request("GET", "/_tasks");
         request.addParameter("detailed", "true");
@@ -139,6 +179,10 @@ public class FollowIndexSecurityIT extends ESRestTestCase {
     }

     private static void index(String index, String id, Object... fields) throws IOException {
+        index(adminClient(), index, id, fields);
+    }
+
+    private static void index(RestClient client, String index, String id, Object... fields) throws IOException {
         XContentBuilder document = jsonBuilder().startObject();
         for (int i = 0; i < fields.length; i += 2) {
             document.field((String) fields[i], fields[i + 1]);
@@ -146,7 +190,7 @@ public class FollowIndexSecurityIT extends ESRestTestCase {
         document.endObject();
         final Request request = new Request("POST", "/" + index + "/_doc/" + id);
         request.setJsonEntity(Strings.toString(document));
-        assertOK(adminClient().performRequest(request));
+        assertOK(client.performRequest(request));
     }

     private static void refresh(String index) throws IOException {
@@ -201,11 +245,34 @@ public class FollowIndexSecurityIT extends ESRestTestCase {
         assertOK(adminClient().performRequest(request));
     }

+    private static void ensureYellow(String index) throws IOException {
+        Request request = new Request("GET", "/_cluster/health/" + index);
+        request.addParameter("wait_for_status", "yellow");
+        request.addParameter("wait_for_no_relocating_shards", "true");
+        request.addParameter("wait_for_no_initializing_shards", "true");
+        request.addParameter("timeout", "70s");
+        request.addParameter("level", "shards");
+        adminClient().performRequest(request);
+    }
+
+    private RestClient buildLeaderClient() throws IOException {
+        assert runningAgainstLeaderCluster == false;
+        String leaderUrl = System.getProperty("tests.leader_host");
+        int portSeparator = leaderUrl.lastIndexOf(':');
+        HttpHost httpHost = new HttpHost(leaderUrl.substring(0, portSeparator),
+            Integer.parseInt(leaderUrl.substring(portSeparator + 1)), getProtocol());
+        return buildClient(restAdminSettings(), new HttpHost[]{httpHost});
+    }
+
     private static boolean indexExists(RestClient client, String index) throws IOException {
         Response response = client.performRequest(new Request("HEAD", "/" + index));
         return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
     }

+    private static void unfollowIndex(String followIndex) throws IOException {
+        assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/unfollow")));
+    }
+
     private static void verifyCcrMonitoring(String expectedLeaderIndex, String expectedFollowerIndex) throws IOException {
         ensureYellow(".monitoring-*");

@@ -239,14 +306,4 @@ public class FollowIndexSecurityIT extends ESRestTestCase {
         assertThat(numberOfOperationsIndexed, greaterThanOrEqualTo(1));
     }

-    private static void ensureYellow(String index) throws IOException {
-        Request request = new Request("GET", "/_cluster/health/" + index);
-        request.addParameter("wait_for_status", "yellow");
-        request.addParameter("wait_for_no_relocating_shards", "true");
-        request.addParameter("wait_for_no_initializing_shards", "true");
-        request.addParameter("timeout", "70s");
-        request.addParameter("level", "shards");
-        adminClient().performRequest(request);
-    }
-
 }
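Outside the test harness, the two auto-follow management calls the new test issues need nothing beyond HTTP. A hedged sketch with plain JDK networking, assuming a cluster on localhost:9200 with a remote cluster aliased leader_cluster (both assumptions, mirroring the test's setup rather than any fixed deployment):

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class AutoFollowCalls {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:9200/_ccr/auto_follow/leader_cluster");

        // PUT the pattern, as testAutoFollowPatterns does at the start:
        HttpURLConnection put = (HttpURLConnection) url.openConnection();
        put.setRequestMethod("PUT");
        put.setRequestProperty("Content-Type", "application/json");
        put.setDoOutput(true);
        try (OutputStream os = put.getOutputStream()) {
            os.write("{\"leader_index_patterns\": [\"logs-*\"]}".getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("PUT auto_follow: " + put.getResponseCode());

        // DELETE it again, as the test's cleanup does:
        HttpURLConnection delete = (HttpURLConnection) url.openConnection();
        delete.setRequestMethod("DELETE");
        System.out.println("DELETE auto_follow: " + delete.getResponseCode());
    }
}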
@@ -0,0 +1,36 @@
+import org.elasticsearch.gradle.test.RestIntegTestTask
+
+apply plugin: 'elasticsearch.standalone-test'
+
+dependencies {
+    testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
+    testCompile project(path: xpackModule('ccr'), configuration: 'runtime')
+}
+
+task restTest(type: RestIntegTestTask) {
+    mustRunAfter(precommit)
+}
+
+restTestCluster {
+    distribution 'zip'
+    setting 'xpack.ml.enabled', 'false'
+    setting 'xpack.monitoring.enabled', 'false'
+    setting 'xpack.security.enabled', 'true'
+    setting 'xpack.license.self_generated.type', 'trial'
+    // TODO: reduce the need for superuser here
+    setupCommand 'setup-ccr-user',
+        'bin/elasticsearch-users', 'useradd', 'ccr-user', '-p', 'ccr-user-password', '-r', 'superuser'
+    waitCondition = { node, ant ->
+        File tmpFile = new File(node.cwd, 'wait.success')
+        ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",
+            dest: tmpFile.toString(),
+            username: 'ccr-user',
+            password: 'ccr-user-password',
+            ignoreerrors: true,
+            retries: 10)
+        return tmpFile.exists()
+    }
+}
+
+check.dependsOn restTest
+test.enabled = false
@@ -0,0 +1,43 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.ccr;
+
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+import org.elasticsearch.xpack.ccr.action.ShardChangesAction;
+import org.elasticsearch.xpack.test.rest.XPackRestTestHelper;
+import org.junit.After;
+
+import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
+
+public class CcrRestIT extends ESClientYamlSuiteTestCase {
+
+    public CcrRestIT(final ClientYamlTestCandidate testCandidate) {
+        super(testCandidate);
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() throws Exception {
+        return ESClientYamlSuiteTestCase.createParameters();
+    }
+
+    @Override
+    protected Settings restClientSettings() {
+        final String ccrUserAuthHeaderValue = basicAuthHeaderValue("ccr-user", new SecureString("ccr-user-password".toCharArray()));
+        return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", ccrUserAuthHeaderValue).build();
+    }
+
+    @After
+    public void cleanup() throws Exception {
+        XPackRestTestHelper.waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(ShardChangesAction.NAME));
+    }
+
+}
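restClientSettings() above attaches a Basic authorization header for the ccr-user that the build file creates. Constructing such a header value is plain base64 over "user:password"; a self-contained JDK sketch (the helper name mirrors, but is not, the x-pack utility):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class BasicAuthHeader {
    static String basicAuthHeaderValue(String user, String password) {
        String token = Base64.getEncoder()
            .encodeToString((user + ":" + password).getBytes(StandardCharsets.UTF_8));
        return "Basic " + token;
    }

    public static void main(String[] args) {
        System.out.println(basicAuthHeaderValue("ccr-user", "ccr-user-password"));
    }
}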
@@ -7,17 +7,23 @@
 package org.elasticsearch.xpack.ccr;

 import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
+import org.elasticsearch.action.support.ContextPreservingActionListener;
 import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
 import org.elasticsearch.action.admin.indices.stats.IndexStats;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
 import org.elasticsearch.action.admin.indices.stats.ShardStats;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.client.FilterClient;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.index.engine.CommitStats;
 import org.elasticsearch.index.engine.Engine;
@@ -25,15 +31,19 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.license.RemoteClusterLicenseChecker;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.xpack.ccr.action.ShardFollowTask;
 import org.elasticsearch.xpack.core.XPackPlugin;

 import java.util.Collections;
 import java.util.Locale;
+import java.util.Map;
 import java.util.Objects;
 import java.util.function.BiConsumer;
 import java.util.function.BooleanSupplier;
 import java.util.function.Consumer;
 import java.util.function.Function;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;

 /**
  * Encapsulates licensing checking for CCR.
@@ -93,6 +103,7 @@ public final class CcrLicenseChecker {
         request.indices(leaderIndex);
         checkRemoteClusterLicenseAndFetchClusterState(
                 client,
+                Collections.emptyMap(),
                 clusterAlias,
                 request,
                 onFailure,
@@ -115,6 +126,7 @@ public final class CcrLicenseChecker {
      *
      * @param client                     the client
      * @param clusterAlias               the remote cluster alias
+     * @param headers                    the headers to use for leader client
      * @param request                    the cluster state request
      * @param onFailure                  the failure consumer
      * @param leaderClusterStateConsumer the leader cluster state consumer
@@ -122,12 +134,14 @@ public final class CcrLicenseChecker {
      */
     public <T> void checkRemoteClusterLicenseAndFetchClusterState(
             final Client client,
+            final Map<String, String> headers,
             final String clusterAlias,
             final ClusterStateRequest request,
             final Consumer<Exception> onFailure,
             final Consumer<ClusterState> leaderClusterStateConsumer) {
         checkRemoteClusterLicenseAndFetchClusterState(
                 client,
+                headers,
                 clusterAlias,
                 request,
                 onFailure,
@@ -144,6 +158,7 @@ public final class CcrLicenseChecker {
      *
      * @param client                     the client
      * @param clusterAlias               the remote cluster alias
+     * @param headers                    the headers to use for leader client
      * @param request                    the cluster state request
      * @param onFailure                  the failure consumer
      * @param leaderClusterStateConsumer the leader cluster state consumer
@@ -153,6 +168,7 @@ public final class CcrLicenseChecker {
      */
     private <T> void checkRemoteClusterLicenseAndFetchClusterState(
             final Client client,
+            final Map<String, String> headers,
             final String clusterAlias,
             final ClusterStateRequest request,
             final Consumer<Exception> onFailure,
@@ -167,7 +183,7 @@ public final class CcrLicenseChecker {
                     @Override
                     public void onResponse(final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) {
                         if (licenseCheck.isSuccess()) {
-                            final Client leaderClient = client.getRemoteClusterClient(clusterAlias);
+                            final Client leaderClient = wrapClient(client.getRemoteClusterClient(clusterAlias), headers);
                             final ActionListener<ClusterStateResponse> clusterStateListener =
                                     ActionListener.wrap(s -> leaderClusterStateConsumer.accept(s.getState()), onFailure);
                             // following an index in remote cluster, so use remote client to fetch leader index metadata
@@ -237,6 +253,33 @@ public final class CcrLicenseChecker {
         leaderClient.admin().indices().stats(request, ActionListener.wrap(indicesStatsHandler, onFailure));
     }

+    public static Client wrapClient(Client client, Map<String, String> headers) {
+        if (headers.isEmpty()) {
+            return client;
+        } else {
+            final ThreadContext threadContext = client.threadPool().getThreadContext();
+            Map<String, String> filteredHeaders = headers.entrySet().stream()
+                .filter(e -> ShardFollowTask.HEADER_FILTERS.contains(e.getKey()))
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+            return new FilterClient(client) {
+                @Override
+                protected <Request extends ActionRequest, Response extends ActionResponse>
+                void doExecute(Action<Response> action, Request request, ActionListener<Response> listener) {
+                    final Supplier<ThreadContext.StoredContext> supplier = threadContext.newRestorableContext(false);
+                    try (ThreadContext.StoredContext ignore = stashWithHeaders(threadContext, filteredHeaders)) {
+                        super.doExecute(action, request, new ContextPreservingActionListener<>(supplier, listener));
+                    }
+                }
+            };
+        }
+    }
+
+    private static ThreadContext.StoredContext stashWithHeaders(ThreadContext threadContext, Map<String, String> headers) {
+        final ThreadContext.StoredContext storedContext = threadContext.stashContext();
+        threadContext.copyHeaders(headers.entrySet());
+        return storedContext;
+    }
+
     private static ElasticsearchStatusException indexMetadataNonCompliantRemoteLicense(
             final String leaderIndex, final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) {
         final String clusterAlias = licenseCheck.remoteClusterLicenseInfo().clusterAlias();
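wrapClient above stashes the current thread context, copies the filtered security headers in, runs the request, and restores the old context afterwards, so remote calls execute with the headers captured when the follow was requested. A pure-JDK sketch of the same stash/restore shape using a ThreadLocal (an analogy to, not the implementation of, Elasticsearch's ThreadContext):

import java.util.HashMap;
import java.util.Map;

public class HeaderStash {
    static final ThreadLocal<Map<String, String>> CONTEXT = ThreadLocal.withInitial(HashMap::new);

    // Swap in the given headers; the returned Runnable restores the previous context.
    static Runnable stashWithHeaders(Map<String, String> headers) {
        Map<String, String> previous = CONTEXT.get();
        CONTEXT.set(new HashMap<>(headers));
        return () -> CONTEXT.set(previous);
    }

    public static void main(String[] args) {
        CONTEXT.get().put("caller", "system");
        Runnable restore = stashWithHeaders(Map.of("Authorization", "<captured header>"));
        try {
            System.out.println("during: " + CONTEXT.get()); // runs with the stashed headers
        } finally {
            restore.run();
        }
        System.out.println("after: " + CONTEXT.get()); // original context restored
    }
}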
@@ -27,7 +27,7 @@ public final class CcrSettings {
      * Index setting for a following index.
      */
     public static final Setting<Boolean> CCR_FOLLOWING_INDEX_SETTING =
-            Setting.boolSetting("index.xpack.ccr.following_index", false, Setting.Property.IndexScope);
+            Setting.boolSetting("index.xpack.ccr.following_index", false, Property.IndexScope, Property.InternalIndex);

     /**
      * Setting for controlling the interval in between polling leader clusters to check whether there are indices to follow
@@ -103,19 +103,22 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
         AutoFollower operation = new AutoFollower(handler, followerClusterState) {

             @Override
-            void getLeaderClusterState(final String leaderClusterAlias, final BiConsumer<ClusterState, Exception> handler) {
+            void getLeaderClusterState(final Map<String, String> headers,
+                                       final String leaderClusterAlias,
+                                       final BiConsumer<ClusterState, Exception> handler) {
                 final ClusterStateRequest request = new ClusterStateRequest();
                 request.clear();
                 request.metaData(true);

                 if ("_local_".equals(leaderClusterAlias)) {
+                    Client client = CcrLicenseChecker.wrapClient(AutoFollowCoordinator.this.client, headers);
                     client.admin().cluster().state(
                         request, ActionListener.wrap(r -> handler.accept(r.getState(), null), e -> handler.accept(null, e)));
                 } else {
-                    final Client leaderClient = client.getRemoteClusterClient(leaderClusterAlias);
                     // TODO: set non-compliant status on auto-follow coordination that can be viewed via a stats API
                     ccrLicenseChecker.checkRemoteClusterLicenseAndFetchClusterState(
-                        leaderClient,
+                        client,
+                        headers,
                         leaderClusterAlias,
                         request,
                         e -> handler.accept(null, e),
@@ -125,15 +128,22 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
             }

             @Override
-            void createAndFollow(FollowIndexAction.Request followRequest,
+            void createAndFollow(Map<String, String> headers,
+                                 FollowIndexAction.Request followRequest,
                                  Runnable successHandler,
                                  Consumer<Exception> failureHandler) {
-                client.execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest),
-                    ActionListener.wrap(r -> successHandler.run(), failureHandler));
+                Client followerClient = CcrLicenseChecker.wrapClient(client, headers);
+                CreateAndFollowIndexAction.Request request = new CreateAndFollowIndexAction.Request(followRequest);
+                followerClient.execute(
+                    CreateAndFollowIndexAction.INSTANCE,
+                    request,
+                    ActionListener.wrap(r -> successHandler.run(), failureHandler)
+                );
             }

             @Override
-            void updateAutoFollowMetadata(Function<ClusterState, ClusterState> updateFunction, Consumer<Exception> handler) {
+            void updateAutoFollowMetadata(Function<ClusterState, ClusterState> updateFunction,
+                                          Consumer<Exception> handler) {
                 clusterService.submitStateUpdateTask("update_auto_follow_metadata", new ClusterStateUpdateTask() {

                     @Override
@@ -188,7 +198,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
                 AutoFollowPattern autoFollowPattern = entry.getValue();
                 List<String> followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(clusterAlias);

-                getLeaderClusterState(clusterAlias, (leaderClusterState, e) -> {
+                getLeaderClusterState(autoFollowPattern.getHeaders(), clusterAlias, (leaderClusterState, e) -> {
                     if (leaderClusterState != null) {
                         assert e == null;
                         handleClusterAlias(clusterAlias, autoFollowPattern, followedIndices, leaderClusterState);
@@ -251,7 +261,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
                         finalise(followError);
                     }
                 };
-                createAndFollow(followRequest, successHandler, failureHandler);
+                createAndFollow(autoFollowPattern.getHeaders(), followRequest, successHandler, failureHandler);
             }
         }
     }
@@ -314,14 +324,27 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
         /**
         * Fetch the cluster state from the leader with the specified cluster alias
         *
+         * @param headers            the client headers
         * @param leaderClusterAlias the cluster alias of the leader
         * @param handler            the callback to invoke
         */
-        abstract void getLeaderClusterState(String leaderClusterAlias, BiConsumer<ClusterState, Exception> handler);
+        abstract void getLeaderClusterState(
+            Map<String, String> headers,
+            String leaderClusterAlias,
+            BiConsumer<ClusterState, Exception> handler
+        );

-        abstract void createAndFollow(FollowIndexAction.Request followRequest, Runnable successHandler, Consumer<Exception> failureHandler);
+        abstract void createAndFollow(
+            Map<String, String> headers,
+            FollowIndexAction.Request followRequest,
+            Runnable successHandler,
+            Consumer<Exception> failureHandler
+        );

-        abstract void updateAutoFollowMetadata(Function<ClusterState, ClusterState> updateFunction, Consumer<Exception> handler);
+        abstract void updateAutoFollowMetadata(
+            Function<ClusterState, ClusterState> updateFunction,
+            Consumer<Exception> handler
+        );

     }
 }
@@ -5,7 +5,9 @@
 package org.elasticsearch.xpack.ccr.action;

 import org.elasticsearch.ElasticsearchStatusException;
+import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.ActionFilters;
@@ -19,6 +21,7 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.seqno.SeqNoStats;
 import org.elasticsearch.index.shard.IndexShard;
@@ -36,8 +39,10 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
+import java.util.concurrent.TimeoutException;

 import static org.elasticsearch.action.ValidateActions.addValidationError;
+import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;

 public class ShardChangesAction extends Action<ShardChangesAction.Response> {

@@ -59,6 +64,7 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
         private int maxOperationCount;
         private ShardId shardId;
         private String expectedHistoryUUID;
+        private TimeValue pollTimeout = FollowIndexAction.DEFAULT_POLL_TIMEOUT;
         private long maxOperationSizeInBytes = FollowIndexAction.DEFAULT_MAX_BATCH_SIZE_IN_BYTES;

         public Request(ShardId shardId, String expectedHistoryUUID) {
@@ -102,6 +108,14 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
             return expectedHistoryUUID;
         }

+        public TimeValue getPollTimeout() {
+            return pollTimeout;
+        }
+
+        public void setPollTimeout(final TimeValue pollTimeout) {
+            this.pollTimeout = Objects.requireNonNull(pollTimeout, "pollTimeout");
+        }
+
         @Override
         public ActionRequestValidationException validate() {
             ActionRequestValidationException validationException = null;
@@ -126,6 +140,7 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
             maxOperationCount = in.readVInt();
             shardId = ShardId.readShardId(in);
             expectedHistoryUUID = in.readString();
+            pollTimeout = in.readTimeValue();
             maxOperationSizeInBytes = in.readVLong();
         }

@@ -136,6 +151,7 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
             out.writeVInt(maxOperationCount);
             shardId.writeTo(out);
             out.writeString(expectedHistoryUUID);
+            out.writeTimeValue(pollTimeout);
             out.writeVLong(maxOperationSizeInBytes);
         }

@@ -149,12 +165,13 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
                     maxOperationCount == request.maxOperationCount &&
                     Objects.equals(shardId, request.shardId) &&
                     Objects.equals(expectedHistoryUUID, request.expectedHistoryUUID) &&
+                    Objects.equals(pollTimeout, request.pollTimeout) &&
                     maxOperationSizeInBytes == request.maxOperationSizeInBytes;
         }

         @Override
         public int hashCode() {
-            return Objects.hash(fromSeqNo, maxOperationCount, shardId, expectedHistoryUUID, maxOperationSizeInBytes);
+            return Objects.hash(fromSeqNo, maxOperationCount, shardId, expectedHistoryUUID, pollTimeout, maxOperationSizeInBytes);
         }

         @Override
@@ -164,6 +181,7 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
                     ", maxOperationCount=" + maxOperationCount +
                     ", shardId=" + shardId +
                     ", expectedHistoryUUID=" + expectedHistoryUUID +
+                    ", pollTimeout=" + pollTimeout +
                     ", maxOperationSizeInBytes=" + maxOperationSizeInBytes +
                     '}';
         }
@@ -265,19 +283,90 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {

         @Override
         protected Response shardOperation(Request request, ShardId shardId) throws IOException {
-            IndexService indexService = indicesService.indexServiceSafe(request.getShard().getIndex());
-            IndexShard indexShard = indexService.getShard(request.getShard().id());
+            final IndexService indexService = indicesService.indexServiceSafe(request.getShard().getIndex());
+            final IndexShard indexShard = indexService.getShard(request.getShard().id());
             final SeqNoStats seqNoStats = indexShard.seqNoStats();
             final long mappingVersion = clusterService.state().metaData().index(shardId.getIndex()).getMappingVersion();

             final Translog.Operation[] operations = getOperations(
                     indexShard,
                     seqNoStats.getGlobalCheckpoint(),
-                    request.fromSeqNo,
-                    request.maxOperationCount,
-                    request.expectedHistoryUUID,
-                    request.maxOperationSizeInBytes);
-            return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), operations);
+                    request.getFromSeqNo(),
+                    request.getMaxOperationCount(),
+                    request.getExpectedHistoryUUID(),
+                    request.getMaxOperationSizeInBytes());
+            return getResponse(mappingVersion, seqNoStats, operations);
+        }
+
+        @Override
+        protected void asyncShardOperation(
+                final Request request,
+                final ShardId shardId,
+                final ActionListener<Response> listener) throws IOException {
+            final IndexService indexService = indicesService.indexServiceSafe(request.getShard().getIndex());
+            final IndexShard indexShard = indexService.getShard(request.getShard().id());
+            final SeqNoStats seqNoStats = indexShard.seqNoStats();
+
+            if (request.getFromSeqNo() > seqNoStats.getGlobalCheckpoint()) {
+                logger.trace(
+                        "{} waiting for global checkpoint advancement from [{}] to [{}]",
+                        shardId,
+                        seqNoStats.getGlobalCheckpoint(),
+                        request.getFromSeqNo());
+                indexShard.addGlobalCheckpointListener(
+                        request.getFromSeqNo(),
+                        (g, e) -> {
+                            if (g != UNASSIGNED_SEQ_NO) {
+                                assert request.getFromSeqNo() <= g
+                                        : shardId + " only advanced to [" + g + "] while waiting for [" + request.getFromSeqNo() + "]";
+                                globalCheckpointAdvanced(shardId, g, request, listener);
+                            } else {
+                                assert e != null;
+                                globalCheckpointAdvancementFailure(shardId, e, request, listener, indexShard);
+                            }
+                        },
+                        request.getPollTimeout());
+            } else {
+                super.asyncShardOperation(request, shardId, listener);
+            }
+        }
+
+        private void globalCheckpointAdvanced(
+                final ShardId shardId,
+                final long globalCheckpoint,
+                final Request request,
+                final ActionListener<Response> listener) {
+            logger.trace("{} global checkpoint advanced to [{}] after waiting for [{}]", shardId, globalCheckpoint, request.getFromSeqNo());
+            try {
+                super.asyncShardOperation(request, shardId, listener);
+            } catch (final IOException caught) {
+                listener.onFailure(caught);
+            }
+        }
+
+        private void globalCheckpointAdvancementFailure(
+                final ShardId shardId,
+                final Exception e,
+                final Request request,
+                final ActionListener<Response> listener,
+                final IndexShard indexShard) {
+            logger.trace(
+                    () -> new ParameterizedMessage(
+                            "{} exception waiting for global checkpoint advancement to [{}]", shardId, request.getFromSeqNo()),
+                    e);
+            if (e instanceof TimeoutException) {
+                try {
+                    final long mappingVersion =
+                            clusterService.state().metaData().index(shardId.getIndex()).getMappingVersion();
+                    final SeqNoStats latestSeqNoStats = indexShard.seqNoStats();
+                    listener.onResponse(getResponse(mappingVersion, latestSeqNoStats, EMPTY_OPERATIONS_ARRAY));
+                } catch (final Exception caught) {
+                    caught.addSuppressed(e);
+                    listener.onFailure(caught);
+                }
+            } else {
+                listener.onFailure(e);
+            }
         }

         @Override
@@ -300,7 +389,7 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {

     }

-    private static final Translog.Operation[] EMPTY_OPERATIONS_ARRAY = new Translog.Operation[0];
+    static final Translog.Operation[] EMPTY_OPERATIONS_ARRAY = new Translog.Operation[0];

     /**
      * Returns at most maxOperationCount operations from the specified from sequence number.
@@ -324,7 +413,8 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
                     historyUUID + "]");
         }
         if (fromSeqNo > globalCheckpoint) {
-            return EMPTY_OPERATIONS_ARRAY;
+            throw new IllegalStateException(
+                    "not exposing operations from [" + fromSeqNo + "] greater than the global checkpoint [" + globalCheckpoint + "]");
         }
         int seenBytes = 0;
         // - 1 is needed, because toSeqNo is inclusive
@@ -344,4 +434,8 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
         return operations.toArray(EMPTY_OPERATIONS_ARRAY);
     }

+    static Response getResponse(final long mappingVersion, final SeqNoStats seqNoStats, final Translog.Operation[] operations) {
+        return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), operations);
+    }
+
 }
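The new asyncShardOperation turns the shard-changes fetch into a long poll: when the requested sequence number is ahead of the global checkpoint, it registers a checkpoint listener and answers either when the checkpoint advances or when the poll timeout elapses, in which case it returns an empty batch and the follower simply asks again. A runnable pure-JDK sketch of that control flow (the scheduled advancement stands in for real indexing; -1 stands in for the empty response):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.LongConsumer;

public class LongPollSketch {
    static volatile long globalCheckpoint = 5;
    static final ScheduledExecutorService pool = Executors.newScheduledThreadPool(2);

    // Answer when the checkpoint reaches fromSeqNo, or time out with an "empty batch" (-1).
    static void pollOps(long fromSeqNo, long timeoutMillis, LongConsumer respond) {
        if (fromSeqNo <= globalCheckpoint) {
            respond.accept(globalCheckpoint);      // data already available: answer immediately
            return;
        }
        ScheduledFuture<?> timeout = pool.schedule(
            () -> respond.accept(-1L),             // poll timeout: empty response, caller retries
            timeoutMillis, TimeUnit.MILLISECONDS);
        pool.schedule(() -> {                      // simulate the checkpoint advancing later
            globalCheckpoint = fromSeqNo;
            if (timeout.cancel(false)) {
                respond.accept(globalCheckpoint);  // advanced in time: answer with data
            }
        }, 50, TimeUnit.MILLISECONDS);
    }

    public static void main(String[] args) throws InterruptedException {
        CountDownLatch done = new CountDownLatch(1);
        pollOps(10, 1000, g -> { System.out.println("answered at checkpoint " + g); done.countDown(); });
        done.await();
        pool.shutdown();
    }
}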
@@ -50,8 +50,8 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {

     private final String leaderIndex;
     private final ShardFollowTask params;
+    private final TimeValue pollTimeout;
     private final TimeValue maxRetryDelay;
-    private final TimeValue idleShardChangesRequestDelay;
     private final BiConsumer<TimeValue, Runnable> scheduler;
     private final LongSupplier relativeTimeProvider;

@@ -82,8 +82,8 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
         this.params = params;
         this.scheduler = scheduler;
         this.relativeTimeProvider = relativeTimeProvider;
+        this.pollTimeout = params.getPollTimeout();
         this.maxRetryDelay = params.getMaxRetryDelay();
-        this.idleShardChangesRequestDelay = params.getIdleShardRetryDelay();
         /*
          * We keep track of the most recent fetch exceptions, with the number of exceptions that we track equal to the maximum number of
          * concurrent fetches. For each failed fetch, we track the from sequence number associated with the request, and we clear the entry
@@ -229,12 +229,16 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
         }
         innerSendShardChangesRequest(from, maxOperationCount,
                 response -> {
-                    synchronized (ShardFollowNodeTask.this) {
-                        totalFetchTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime);
-                        numberOfSuccessfulFetches++;
-                        fetchExceptions.remove(from);
-                        operationsReceived += response.getOperations().length;
-                        totalTransferredBytes += Arrays.stream(response.getOperations()).mapToLong(Translog.Operation::estimateSize).sum();
+                    if (response.getOperations().length > 0) {
+                        // do not count polls against fetch stats
+                        synchronized (ShardFollowNodeTask.this) {
+                            totalFetchTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime);
+                            numberOfSuccessfulFetches++;
+                            fetchExceptions.remove(from);
+                            operationsReceived += response.getOperations().length;
+                            totalTransferredBytes +=
+                                    Arrays.stream(response.getOperations()).mapToLong(Translog.Operation::estimateSize).sum();
+                        }
                     }
                     handleReadResponse(from, maxRequiredSeqNo, response);
                 },
@@ -286,15 +290,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
         } else {
             // read is completed, decrement
             numConcurrentReads--;
-            if (response.getOperations().length == 0 && leaderGlobalCheckpoint == lastRequestedSeqNo) {
-                // we got nothing and we have no reason to believe asking again well get us more, treat shard as idle and delay
-                // future requests
-                LOGGER.trace("{} received no ops and no known ops to fetch, scheduling to coordinate reads",
-                    params.getFollowShardId());
-                scheduler.accept(idleShardChangesRequestDelay, this::coordinateReads);
-            } else {
-                coordinateReads();
-            }
+            coordinateReads();
         }
     }
Some files were not shown because too many files have changed in this diff.