Merge branch 'master' into index-lifecycle
commit 0b42eda0e3
@@ -161,7 +161,7 @@ Please follow these formatting guidelines:
* Line width is 140 characters
* The rest is left to Java coding standards
* Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do.
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. Please attempt to tame your IDE so it doesn't make them and please send a PR against this document with instructions for your IDE if it doesn't contain them.
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. This can be done automatically by your IDE:
  * Eclipse: `Preferences->Java->Code Style->Organize Imports`. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value.
  * IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`. There are two configuration options: `Class count to use import with '*'` and `Names count to use static import with '*'`. Set their values to 99999 or some other absurdly high value.
* Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so.
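As a quick illustration of the import rule above (a made-up Java snippet, not taken from the repository):

// Forbidden: any wildcard import fails the build.
// import java.util.*;

// Allowed: each type is imported explicitly.
import java.util.HashMap;
import java.util.Map;

public class ImportStyleExample {
    private final Map<String, String> values = new HashMap<>();
}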
@@ -320,7 +320,7 @@ have to test Elasticsearch.
#### Configurations

Gradle organizes dependencies and build artifacts into "configurations" and
allows you to use these configurations arbitrarilly. Here are some of the most
allows you to use these configurations arbitrarily. Here are some of the most
common configurations in our build and how we use them:

<dl>
@@ -1,4 +1,4 @@
[[Testing Framework Cheatsheet]]
[[TestingFrameworkCheatsheet]]
= Testing

[partintro]

@@ -250,7 +250,7 @@ Pass arbitrary jvm arguments.

Running backwards compatibility tests is disabled by default since it
requires a release version of elasticsearch to be present on the test system.
To run backwards compatibilty tests untar or unzip a release and run the tests
To run backwards compatibility tests untar or unzip a release and run the tests
with the following command:

---------------------------------------------------------------------------
@@ -44,6 +44,8 @@ compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-u
// needs to be added separately otherwise Gradle will quote it and javac will fail
compileJava.options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"])

run.executable = new File(project.runtimeJavaHome, 'bin/java')

// classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes
forbiddenApisMain.enabled = false
@ -0,0 +1,94 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.benchmark.fs;
|
||||
|
||||
import org.elasticsearch.common.logging.LogConfigurator;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.openjdk.jmh.annotations.Benchmark;
|
||||
import org.openjdk.jmh.annotations.BenchmarkMode;
|
||||
import org.openjdk.jmh.annotations.Fork;
|
||||
import org.openjdk.jmh.annotations.Measurement;
|
||||
import org.openjdk.jmh.annotations.Mode;
|
||||
import org.openjdk.jmh.annotations.OutputTimeUnit;
|
||||
import org.openjdk.jmh.annotations.Scope;
|
||||
import org.openjdk.jmh.annotations.Setup;
|
||||
import org.openjdk.jmh.annotations.State;
|
||||
import org.openjdk.jmh.annotations.Warmup;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
|
||||
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
|
||||
@Fork(3)
|
||||
@BenchmarkMode(Mode.AverageTime)
|
||||
@OutputTimeUnit(TimeUnit.NANOSECONDS)
|
||||
@State(Scope.Benchmark)
|
||||
public class AvailableIndexFoldersBenchmark {
|
||||
|
||||
private NodeEnvironment.NodePath nodePath;
|
||||
private NodeEnvironment nodeEnv;
|
||||
private Set<String> excludedDirs;
|
||||
|
||||
@Setup
|
||||
public void setup() throws IOException {
|
||||
Path path = Files.createTempDirectory("test");
|
||||
String[] paths = new String[] {path.toString()};
|
||||
nodePath = new NodeEnvironment.NodePath(path);
|
||||
|
||||
LogConfigurator.setNodeName("test");
|
||||
Settings settings = Settings.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
||||
nodeEnv = new NodeEnvironment(settings, new Environment(settings, null));
|
||||
|
||||
Files.createDirectories(nodePath.indicesPath);
|
||||
excludedDirs = new HashSet<>();
|
||||
int numIndices = 5000;
|
||||
for (int i = 0; i < numIndices; i++) {
|
||||
String dirName = "dir" + i;
|
||||
Files.createDirectory(nodePath.indicesPath.resolve(dirName));
|
||||
excludedDirs.add(dirName);
|
||||
}
|
||||
if (nodeEnv.availableIndexFoldersForPath(nodePath).size() != numIndices) {
|
||||
throw new IllegalStateException("bad size");
|
||||
}
|
||||
if (nodeEnv.availableIndexFoldersForPath(nodePath, excludedDirs::contains).size() != 0) {
|
||||
throw new IllegalStateException("bad size");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Benchmark
|
||||
public Set<String> availableIndexFolderNaive() throws IOException {
|
||||
return nodeEnv.availableIndexFoldersForPath(nodePath);
|
||||
}
|
||||
|
||||
@Benchmark
|
||||
public Set<String> availableIndexFolderOptimized() throws IOException {
|
||||
return nodeEnv.availableIndexFoldersForPath(nodePath, excludedDirs::contains);
|
||||
}
|
||||
|
||||
}
|
|
@ -264,7 +264,11 @@ class RandomizedTestingTask extends DefaultTask {
|
|||
throw new InvalidUserDataException('Seed should be ' +
|
||||
'set on the project instead of a system property')
|
||||
}
|
||||
sysproperty key: prop.getKey(), value: prop.getValue().toString()
|
||||
if (prop.getValue() instanceof Closure) {
|
||||
sysproperty key: prop.getKey(), value: (prop.getValue() as Closure).call().toString()
|
||||
} else {
|
||||
sysproperty key: prop.getKey(), value: prop.getValue().toString()
|
||||
}
|
||||
}
|
||||
systemProperty 'tests.seed', project.testSeed
|
||||
for (Map.Entry<String, Object> envvar : environmentVariables) {
|
||||
|
|
|
@@ -122,7 +122,7 @@ class VersionCollection {
if (isReleased(version) == false) {
// caveat 1 - This should only ever contain 2 non released branches in flight. An example is 6.x is frozen,
// and 6.2 is cut but not yet released there is some simple logic to make sure that in the case of more than 2,
// it will bail. The order is that the minor snapshot is fufilled first, and then the staged minor snapshot
// it will bail. The order is that the minor snapshot is fulfilled first, and then the staged minor snapshot
if (nextMinorSnapshot == null) {
// it has not been set yet
nextMinorSnapshot = replaceAsSnapshot(version)
@ -72,7 +72,7 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
|
|||
|
||||
/**
|
||||
* Root directory containing all the files generated by this task. It is
|
||||
* contained withing testRoot.
|
||||
* contained within testRoot.
|
||||
*/
|
||||
File outputRoot() {
|
||||
return new File(testRoot, '/rest-api-spec/test')
|
||||
|
@ -226,10 +226,10 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
|
|||
} else {
|
||||
current.println('---')
|
||||
current.println("\"line_$test.start\":")
|
||||
/* The Elasticsearch test runner doesn't support the warnings
|
||||
* construct unless you output this skip. Since we don't know
|
||||
* if this snippet will use the warnings construct we emit this
|
||||
* warning every time. */
|
||||
/* The Elasticsearch test runner doesn't support quite a few
|
||||
* constructs unless we output this skip. We don't know if
|
||||
* we're going to use these constructs, but we might so we
|
||||
* output the skip just in case. */
|
||||
current.println(" - skip:")
|
||||
current.println(" features: ")
|
||||
current.println(" - default_shards")
|
||||
|
@ -250,13 +250,13 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
|
|||
}
|
||||
}
|
||||
}
|
||||
if (test.skipTest) {
|
||||
if (test.skip) {
|
||||
if (test.continued) {
|
||||
throw new InvalidUserDataException("Continued snippets "
|
||||
+ "can't be skipped")
|
||||
}
|
||||
current.println(" - always_skip")
|
||||
current.println(" reason: $test.skipTest")
|
||||
current.println(" reason: $test.skip")
|
||||
}
|
||||
if (test.setup != null) {
|
||||
// Insert a setup defined outside of the docs
|
||||
|
@ -274,9 +274,11 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
|
|||
}
|
||||
|
||||
private void response(Snippet response) {
|
||||
current.println(" - match: ")
|
||||
current.println(" \$body: ")
|
||||
response.contents.eachLine { current.println(" $it") }
|
||||
if (null == response.skip) {
|
||||
current.println(" - match: ")
|
||||
current.println(" \$body: ")
|
||||
response.contents.eachLine { current.println(" $it") }
|
||||
}
|
||||
}
|
||||
|
||||
void emitDo(String method, String pathAndQuery, String body,
|
||||
|
|
|
@ -122,7 +122,9 @@ public class SnippetsTask extends DefaultTask {
|
|||
+ "contain `curl`.")
|
||||
}
|
||||
}
|
||||
if (snippet.testResponse && snippet.language == 'js') {
|
||||
if (snippet.testResponse
|
||||
&& 'js' == snippet.language
|
||||
&& null == snippet.skip) {
|
||||
String quoted = snippet.contents
|
||||
// quote values starting with $
|
||||
.replaceAll(/([:,])\s*(\$[^ ,\n}]+)/, '$1 "$2"')
|
||||
|
@ -216,7 +218,7 @@ public class SnippetsTask extends DefaultTask {
|
|||
return
|
||||
}
|
||||
if (it.group(4) != null) {
|
||||
snippet.skipTest = it.group(4)
|
||||
snippet.skip = it.group(4)
|
||||
return
|
||||
}
|
||||
if (it.group(5) != null) {
|
||||
|
@ -249,7 +251,7 @@ public class SnippetsTask extends DefaultTask {
|
|||
substitutions = []
|
||||
}
|
||||
String loc = "$file:$lineNumber"
|
||||
parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$CAT) ?/) {
|
||||
parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$CAT|$SKIP) ?/) {
|
||||
if (it.group(1) != null) {
|
||||
// TESTRESPONSE[s/adsf/jkl/]
|
||||
substitutions.add([it.group(1), it.group(2)])
|
||||
|
@ -259,6 +261,9 @@ public class SnippetsTask extends DefaultTask {
|
|||
substitutions.add(['\n$', '\\\\s*/'])
|
||||
substitutions.add(['( +)', '$1\\\\s+'])
|
||||
substitutions.add(['\n', '\\\\s*\n '])
|
||||
} else if (it.group(4) != null) {
|
||||
// TESTRESPONSE[skip:reason]
|
||||
snippet.skip = it.group(4)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -312,7 +317,7 @@ public class SnippetsTask extends DefaultTask {
|
|||
boolean test = false
|
||||
boolean testResponse = false
|
||||
boolean testSetup = false
|
||||
String skipTest = null
|
||||
String skip = null
|
||||
boolean continued = false
|
||||
String language = null
|
||||
String catchPart = null
|
||||
|
@ -337,8 +342,8 @@ public class SnippetsTask extends DefaultTask {
|
|||
if (catchPart) {
|
||||
result += "[catch: $catchPart]"
|
||||
}
|
||||
if (skipTest) {
|
||||
result += "[skip=$skipTest]"
|
||||
if (skip) {
|
||||
result += "[skip=$skip]"
|
||||
}
|
||||
if (continued) {
|
||||
result += '[continued]'
|
||||
|
@ -352,6 +357,9 @@ public class SnippetsTask extends DefaultTask {
|
|||
}
|
||||
if (testResponse) {
|
||||
result += '// TESTRESPONSE'
|
||||
if (skip) {
|
||||
result += "[skip=$skip]"
|
||||
}
|
||||
}
|
||||
if (testSetup) {
|
||||
result += '// TESTSETUP'
|
||||
|
|
|
@@ -337,7 +337,7 @@ class NodeInfo {
case 'deb':
return new File(baseDir, "${distro}-extracted/etc/elasticsearch")
default:
throw new InvalidUserDataException("Unkown distribution: ${distro}")
throw new InvalidUserDataException("Unknown distribution: ${distro}")
}
}
}
@ -43,7 +43,7 @@ public class JdkJarHellCheck {
|
|||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
|
||||
String entry = root.relativize(file).toString().replace('\\', '/');
|
||||
if (entry.endsWith(".class")) {
|
||||
if (entry.endsWith(".class") && entry.endsWith("module-info.class") == false) {
|
||||
if (ext.getResource(entry) != null) {
|
||||
detected.add(
|
||||
entry
|
||||
|
|
|
@ -20,12 +20,12 @@ package org.elasticsearch.gradle.precommit;
|
|||
|
||||
import org.apache.commons.io.output.NullOutputStream;
|
||||
import org.elasticsearch.gradle.JdkJarHellCheck;
|
||||
import org.elasticsearch.test.NamingConventionsCheck;
|
||||
import org.gradle.api.DefaultTask;
|
||||
import org.gradle.api.GradleException;
|
||||
import org.gradle.api.JavaVersion;
|
||||
import org.gradle.api.artifacts.Configuration;
|
||||
import org.gradle.api.file.FileCollection;
|
||||
import org.gradle.api.file.FileTree;
|
||||
import org.gradle.api.tasks.Input;
|
||||
import org.gradle.api.tasks.InputFile;
|
||||
import org.gradle.api.tasks.InputFiles;
|
||||
|
@ -47,6 +47,7 @@ import java.util.TreeSet;
|
|||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
public class ThirdPartyAuditTask extends DefaultTask {
|
||||
|
||||
|
@@ -171,19 +172,38 @@ public class ThirdPartyAuditTask extends DefaultTask {
File jarExpandDir = getJarExpandDir();
// We need to clean up to make sure old dependencies don't linger
getProject().delete(jarExpandDir);
jars.forEach(jar ->

jars.forEach(jar -> {
FileTree jarFiles = getProject().zipTree(jar);
getProject().copy(spec -> {
spec.from(jarFiles);
spec.into(jarExpandDir);
// exclude classes from multi release jars
spec.exclude("META-INF/versions/**");
});
// Deal with multi release jars:
// The order is important, we iterate here so we don't depend on the order in which Gradle executes the spec
// We extract multi release jar classes (if these exist) going from 9 - the first to support them - to the
// current `targetCompatibility` version.
// Each extract will overwrite the top level classes that existed before it, the result is that we end up
// with a single version of the class in `jarExpandDir`.
// This will be the closest version to `targetCompatibility`, the same class that would be loaded in a JVM
// that has `targetCompatibility` version.
// This means we only scan classes that would be loaded into `targetCompatibility`, and don't look at any
// other version specific implementation of said classes.
IntStream.rangeClosed(
Integer.parseInt(JavaVersion.VERSION_1_9.getMajorVersion()),
Integer.parseInt(targetCompatibility.getMajorVersion())
).forEach(majorVersion -> getProject().copy(spec -> {
spec.from(getProject().zipTree(jar));
spec.into(jarExpandDir);
// Exclude classes for multi release jars above target
for (int i = Integer.parseInt(targetCompatibility.getMajorVersion()) + 1;
i <= Integer.parseInt(JavaVersion.VERSION_HIGHER.getMajorVersion());
i++
) {
spec.exclude("META-INF/versions/" + i + "/**");
}
})
);
String metaInfPrefix = "META-INF/versions/" + majorVersion;
spec.include(metaInfPrefix + "/**");
// Drop the version specific prefix
spec.eachFile(details -> details.setPath(details.getPath().replace(metaInfPrefix, "")));
spec.setIncludeEmptyDirs(false);
}));
});
}

private void assertNoJarHell(Set<String> jdkJarHellClasses) {
@ -276,9 +296,9 @@ public class ThirdPartyAuditTask extends DefaultTask {
|
|||
private Set<String> runJdkJarHellCheck() throws IOException {
|
||||
ByteArrayOutputStream standardOut = new ByteArrayOutputStream();
|
||||
ExecResult execResult = getProject().javaexec(spec -> {
|
||||
URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation();
|
||||
URL location = JdkJarHellCheck.class.getProtectionDomain().getCodeSource().getLocation();
|
||||
if (location.getProtocol().equals("file") == false) {
|
||||
throw new GradleException("Unexpected location for NamingConventionCheck class: " + location);
|
||||
throw new GradleException("Unexpected location for JdkJarHellCheck class: " + location);
|
||||
}
|
||||
try {
|
||||
spec.classpath(
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.clusterformation;
|
||||
package org.elasticsearch.gradle.testclusters;
|
||||
|
||||
import org.elasticsearch.gradle.Distribution;
|
||||
import org.elasticsearch.gradle.Version;
|
|
@ -16,7 +16,7 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.clusterformation;
|
||||
package org.elasticsearch.gradle.testclusters;
|
||||
|
||||
import org.elasticsearch.GradleServicesAdapter;
|
||||
import org.elasticsearch.gradle.Distribution;
|
|
@ -16,7 +16,7 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.clusterformation;
|
||||
package org.elasticsearch.gradle.testclusters;
|
||||
|
||||
import groovy.lang.Closure;
|
||||
import org.elasticsearch.GradleServicesAdapter;
|
||||
|
@ -37,12 +37,12 @@ import java.util.HashMap;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class ClusterformationPlugin implements Plugin<Project> {
|
||||
public class TestClustersPlugin implements Plugin<Project> {
|
||||
|
||||
public static final String LIST_TASK_NAME = "listElasticSearchClusters";
|
||||
public static final String EXTENSION_NAME = "elasticSearchClusters";
|
||||
|
||||
private final Logger logger = Logging.getLogger(ClusterformationPlugin.class);
|
||||
private final Logger logger = Logging.getLogger(TestClustersPlugin.class);
|
||||
|
||||
@Override
|
||||
public void apply(Project project) {
|
|
@ -1 +0,0 @@
|
|||
implementation-class=org.elasticsearch.gradle.clusterformation.ClusterformationPlugin
|
|
@ -0,0 +1 @@
|
|||
implementation-class=org.elasticsearch.gradle.testclusters.TestClustersPlugin
|
|
@ -16,7 +16,7 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.clusterformation;
|
||||
package org.elasticsearch.gradle.testclusters;
|
||||
|
||||
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
|
||||
import org.gradle.testkit.runner.BuildResult;
|
||||
|
@ -26,11 +26,11 @@ import org.gradle.testkit.runner.TaskOutcome;
|
|||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNull;
|
||||
|
||||
public class ClusterformationPluginIT extends GradleIntegrationTestCase {
|
||||
public class TestClustersPluginIT extends GradleIntegrationTestCase {
|
||||
|
||||
public void testListClusters() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("clusterformation"))
|
||||
.withProjectDir(getProjectDir("testclusters"))
|
||||
.withArguments("listElasticSearchClusters", "-s")
|
||||
.withPluginClasspath()
|
||||
.build();
|
||||
|
@ -45,7 +45,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
|
|||
|
||||
public void testUseClusterByOne() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("clusterformation"))
|
||||
.withProjectDir(getProjectDir("testclusters"))
|
||||
.withArguments("user1", "-s")
|
||||
.withPluginClasspath()
|
||||
.build();
|
||||
|
@ -60,7 +60,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
|
|||
|
||||
public void testUseClusterByOneWithDryRun() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("clusterformation"))
|
||||
.withProjectDir(getProjectDir("testclusters"))
|
||||
.withArguments("user1", "-s", "--dry-run")
|
||||
.withPluginClasspath()
|
||||
.build();
|
||||
|
@ -75,7 +75,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
|
|||
|
||||
public void testUseClusterByTwo() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("clusterformation"))
|
||||
.withProjectDir(getProjectDir("testclusters"))
|
||||
.withArguments("user1", "user2", "-s")
|
||||
.withPluginClasspath()
|
||||
.build();
|
||||
|
@ -92,7 +92,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
|
|||
|
||||
public void testUseClusterByUpToDateTask() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("clusterformation"))
|
||||
.withProjectDir(getProjectDir("testclusters"))
|
||||
.withArguments("upToDate1", "upToDate2", "-s")
|
||||
.withPluginClasspath()
|
||||
.build();
|
||||
|
@ -109,7 +109,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
|
|||
|
||||
public void testUseClusterBySkippedTask() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("clusterformation"))
|
||||
.withProjectDir(getProjectDir("testclusters"))
|
||||
.withArguments("skipped1", "skipped2", "-s")
|
||||
.withPluginClasspath()
|
||||
.build();
|
||||
|
@ -126,7 +126,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
|
|||
|
||||
public void tetUseClusterBySkippedAndWorkingTask() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("clusterformation"))
|
||||
.withProjectDir(getProjectDir("testclusters"))
|
||||
.withArguments("skipped1", "user1", "-s")
|
||||
.withPluginClasspath()
|
||||
.build();
|
|
@@ -22,7 +22,7 @@ task sample {
// dependsOn buildResources.outputDir
// for now it's just
dependsOn buildResources
// we have to refference it at configuration time in order to be picked up
// we have to reference it at configuration time in order to be picked up
ext.checkstyle_suppressions = buildResources.copy('checkstyle_suppressions.xml')
doLast {
println "This task is using ${file(checkstyle_suppressions)}"

@@ -35,4 +35,4 @@ task noConfigAfterExecution {
println "This should cause an error because we are refferencing " +
"${buildResources.copy('checkstyle_suppressions.xml')} after the `buildResources` task has ran."
}
}
}
@ -1,5 +1,5 @@
|
|||
plugins {
|
||||
id 'elasticsearch.clusterformation'
|
||||
id 'elasticsearch.testclusters'
|
||||
}
|
||||
|
||||
elasticSearchClusters {
|
|
@ -15,6 +15,8 @@ slf4j = 1.6.2
|
|||
# when updating the JNA version, also update the version in buildSrc/build.gradle
|
||||
jna = 4.5.1
|
||||
|
||||
netty = 4.1.30.Final
|
||||
|
||||
# test dependencies
|
||||
randomizedrunner = 2.7.0
|
||||
junit = 4.12
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.elasticsearch.action.ingest.SimulatePipelineResponse;
|
|||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
||||
import static java.util.Collections.emptySet;
|
||||
|
||||
|
@ -83,7 +84,7 @@ public final class IngestClient {
|
|||
*/
|
||||
public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::getPipeline, options,
|
||||
GetPipelineResponse::fromXContent, emptySet());
|
||||
GetPipelineResponse::fromXContent, Collections.singleton(404));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -96,7 +97,7 @@ public final class IngestClient {
|
|||
*/
|
||||
public void getPipelineAsync(GetPipelineRequest request, RequestOptions options, ActionListener<GetPipelineResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::getPipeline, options,
|
||||
GetPipelineResponse::fromXContent, listener, emptySet());
|
||||
GetPipelineResponse::fromXContent, listener, Collections.singleton(404));
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -38,6 +38,7 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
|
|||
import org.elasticsearch.client.ml.GetCalendarsRequest;
|
||||
import org.elasticsearch.client.ml.GetCategoriesRequest;
|
||||
import org.elasticsearch.client.ml.GetDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
|
||||
import org.elasticsearch.client.ml.GetInfluencersRequest;
|
||||
import org.elasticsearch.client.ml.GetJobRequest;
|
||||
import org.elasticsearch.client.ml.GetJobStatsRequest;
|
||||
|
@ -45,6 +46,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest;
|
|||
import org.elasticsearch.client.ml.GetRecordsRequest;
|
||||
import org.elasticsearch.client.ml.OpenJobRequest;
|
||||
import org.elasticsearch.client.ml.PostDataRequest;
|
||||
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.PutCalendarRequest;
|
||||
import org.elasticsearch.client.ml.PutDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.PutJobRequest;
|
||||
|
@ -146,7 +148,12 @@ final class MLRequestConverters {
|
|||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
|
||||
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||
params.putParam("force", Boolean.toString(deleteJobRequest.isForce()));
|
||||
if (deleteJobRequest.getForce() != null) {
|
||||
params.putParam("force", Boolean.toString(deleteJobRequest.getForce()));
|
||||
}
|
||||
if (deleteJobRequest.getWaitForCompletion() != null) {
|
||||
params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion()));
|
||||
}
|
||||
|
||||
return request;
|
||||
}
|
||||
|
@ -259,6 +266,34 @@ final class MLRequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request getDatafeedStats(GetDatafeedStatsRequest getDatafeedStatsRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("ml")
|
||||
.addPathPartAsIs("datafeeds")
|
||||
.addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedStatsRequest.getDatafeedIds()))
|
||||
.addPathPartAsIs("_stats")
|
||||
.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
||||
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||
if (getDatafeedStatsRequest.isAllowNoDatafeeds() != null) {
|
||||
params.putParam("allow_no_datafeeds", Boolean.toString(getDatafeedStatsRequest.isAllowNoDatafeeds()));
|
||||
}
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request previewDatafeed(PreviewDatafeedRequest previewDatafeedRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("ml")
|
||||
.addPathPartAsIs("datafeeds")
|
||||
.addPathPart(previewDatafeedRequest.getDatafeedId())
|
||||
.addPathPartAsIs("_preview")
|
||||
.build();
|
||||
return new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
}
|
||||
|
||||
static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.elasticsearch.client.ml.DeleteCalendarRequest;
|
|||
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.DeleteForecastRequest;
|
||||
import org.elasticsearch.client.ml.DeleteJobRequest;
|
||||
import org.elasticsearch.client.ml.DeleteJobResponse;
|
||||
import org.elasticsearch.client.ml.FlushJobRequest;
|
||||
import org.elasticsearch.client.ml.FlushJobResponse;
|
||||
import org.elasticsearch.client.ml.ForecastJobRequest;
|
||||
|
@ -38,6 +39,8 @@ import org.elasticsearch.client.ml.GetCategoriesRequest;
|
|||
import org.elasticsearch.client.ml.GetCategoriesResponse;
|
||||
import org.elasticsearch.client.ml.GetDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.GetDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
|
||||
import org.elasticsearch.client.ml.GetDatafeedStatsResponse;
|
||||
import org.elasticsearch.client.ml.GetInfluencersRequest;
|
||||
import org.elasticsearch.client.ml.GetInfluencersResponse;
|
||||
import org.elasticsearch.client.ml.GetJobRequest;
|
||||
|
@ -52,6 +55,8 @@ import org.elasticsearch.client.ml.OpenJobRequest;
|
|||
import org.elasticsearch.client.ml.OpenJobResponse;
|
||||
import org.elasticsearch.client.ml.PostDataRequest;
|
||||
import org.elasticsearch.client.ml.PostDataResponse;
|
||||
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.PutCalendarRequest;
|
||||
import org.elasticsearch.client.ml.PutCalendarResponse;
|
||||
import org.elasticsearch.client.ml.PutDatafeedRequest;
|
||||
|
@@ -181,7 +186,7 @@ public final class MachineLearningClient {
}

/**
* Gets one or more Machine Learning job configuration info, asynchronously.
* Gets usage statistics for one or more Machine Learning jobs, asynchronously.
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get job stats docs</a>

@@ -207,14 +212,15 @@ public final class MachineLearningClient {
*
* @param request The request to delete the job
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return action acknowledgement
* @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for
*         completion
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException {
public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::deleteJob,
options,
AcknowledgedResponse::fromXContent,
DeleteJobResponse::fromXContent,
Collections.emptySet());
}

@@ -228,11 +234,11 @@ public final class MachineLearningClient {
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<DeleteJobResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::deleteJob,
options,
AcknowledgedResponse::fromXContent,
DeleteJobResponse::fromXContent,
listener,
Collections.emptySet());
}
@ -649,6 +655,90 @@ public final class MachineLearningClient {
|
|||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets statistics for one or more Machine Learning datafeeds
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html">Get datafeed stats docs</a>
|
||||
*
|
||||
* @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return {@link GetDatafeedStatsResponse} response object containing
|
||||
* the {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} objects and the number of datafeeds found
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public GetDatafeedStatsResponse getDatafeedStats(GetDatafeedStatsRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::getDatafeedStats,
|
||||
options,
|
||||
GetDatafeedStatsResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Previews the given Machine Learning Datafeed
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html">
|
||||
* ML Preview Datafeed documentation</a>
|
||||
*
|
||||
* @param request The request to preview the datafeed
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return {@link PreviewDatafeedResponse} object containing a {@link org.elasticsearch.common.bytes.BytesReference} of the data in
|
||||
* JSON format
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public PreviewDatafeedResponse previewDatafeed(PreviewDatafeedRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::previewDatafeed,
|
||||
options,
|
||||
PreviewDatafeedResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets statistics for one or more Machine Learning datafeeds, asynchronously.
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html">Get datafeed stats docs</a>
|
||||
*
|
||||
* @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified with {@link GetDatafeedStatsResponse} upon request completion
|
||||
*/
|
||||
public void getDatafeedStatsAsync(GetDatafeedStatsRequest request,
|
||||
RequestOptions options,
|
||||
ActionListener<GetDatafeedStatsResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::getDatafeedStats,
|
||||
options,
|
||||
GetDatafeedStatsResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Previews the given Machine Learning Datafeed asynchronously and notifies the listener on completion
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html">
|
||||
* ML Preview Datafeed documentation</a>
|
||||
*
|
||||
* @param request The request to preview the datafeed
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void previewDatafeedAsync(PreviewDatafeedRequest request,
|
||||
RequestOptions options,
|
||||
ActionListener<PreviewDatafeedResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::previewDatafeed,
|
||||
options,
|
||||
PreviewDatafeedResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job}
|
||||
* <p>
|
||||
|
|
|
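To make the new datafeed stats and preview entry points above concrete, here is a hedged usage sketch; the datafeed ids and the client variable are placeholders, imports and client construction are omitted, and everything else follows the method signatures added in this commit.

static void datafeedExamples(RestHighLevelClient client) throws IOException {
    // Stats for every datafeed in the cluster (an explicit "_all" request).
    GetDatafeedStatsRequest statsRequest = GetDatafeedStatsRequest.getAllDatafeedStatsRequest();
    GetDatafeedStatsResponse statsResponse =
        client.machineLearning().getDatafeedStats(statsRequest, RequestOptions.DEFAULT);
    statsResponse.datafeedStats().forEach(stats -> System.out.println(stats));

    // Preview what a single (hypothetical) datafeed would extract.
    PreviewDatafeedRequest previewRequest = new PreviewDatafeedRequest("my-datafeed");
    PreviewDatafeedResponse previewResponse =
        client.machineLearning().previewDatafeed(previewRequest, RequestOptions.DEFAULT);
    System.out.println(previewResponse);
}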
@@ -19,6 +19,8 @@
package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchResponse;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.client.watcher.AckWatchResponse;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;

@@ -121,4 +123,31 @@ public final class WatcherClient {
AckWatchResponse::fromXContent, listener, emptySet());
}

/**
* Activate a watch from the cluster
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-activate-watch.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public ActivateWatchResponse activateWatch(ActivateWatchRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::activateWatch, options,
ActivateWatchResponse::fromXContent, singleton(404));
}

/**
* Asynchronously activates a watch from the cluster
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-activate-watch.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void activateWatchAsync(ActivateWatchRequest request, RequestOptions options, ActionListener<ActivateWatchResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::activateWatch, options,
ActivateWatchResponse::fromXContent, listener, singleton(404));
}

}
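For orientation, a short sketch of calling the new activate-watch methods; the watch id is a placeholder, and the RestHighLevelClient is assumed to exist already (its setup and the imports are not shown).

static void activateWatchExample(RestHighLevelClient client) throws IOException {
    ActivateWatchRequest request = new ActivateWatchRequest("my_watch_id");   // hypothetical watch id

    // Synchronous call added by this commit.
    ActivateWatchResponse response = client.watcher().activateWatch(request, RequestOptions.DEFAULT);
    System.out.println("activated: " + response);

    // Asynchronous variant; ActionListener.wrap builds a listener from two lambdas.
    client.watcher().activateWatchAsync(request, RequestOptions.DEFAULT,
        ActionListener.wrap(
            r -> System.out.println("watch activated"),
            e -> e.printStackTrace()));
}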
@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpDelete;
|
|||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.apache.http.entity.ByteArrayEntity;
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.elasticsearch.client.watcher.ActivateWatchRequest;
|
||||
import org.elasticsearch.client.watcher.AckWatchRequest;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||
|
@ -73,4 +74,16 @@ public class WatcherRequestConverters {
|
|||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request activateWatch(ActivateWatchRequest activateWatchRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("watcher")
|
||||
.addPathPartAsIs("watch")
|
||||
.addPathPart(activateWatchRequest.getWatchId())
|
||||
.addPathPartAsIs("_activate")
|
||||
.build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
return request;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -29,7 +29,8 @@ import java.util.Objects;
public class DeleteJobRequest extends ActionRequest {

private String jobId;
private boolean force;
private Boolean force;
private Boolean waitForCompletion;

public DeleteJobRequest(String jobId) {
this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");

@@ -47,7 +48,7 @@ public class DeleteJobRequest extends ActionRequest {
this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
}

public boolean isForce() {
public Boolean getForce() {
return force;
}

@@ -57,10 +58,24 @@ public class DeleteJobRequest extends ActionRequest {
*
* @param force When {@code true} forcefully delete an opened job. Defaults to {@code false}
*/
public void setForce(boolean force) {
public void setForce(Boolean force) {
this.force = force;
}

public Boolean getWaitForCompletion() {
return waitForCompletion;
}

/**
* Set whether this request should wait until the operation has completed before returning
* @param waitForCompletion When {@code true} the call will wait for the job deletion to complete.
*                          Otherwise, the deletion will be executed asynchronously and the response
*                          will contain the task id.
*/
public void setWaitForCompletion(Boolean waitForCompletion) {
this.waitForCompletion = waitForCompletion;
}

@Override
public ActionRequestValidationException validate() {
return null;
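Tying the new force/waitForCompletion fields to the client change earlier in this commit (MachineLearningClient#deleteJob now returns DeleteJobResponse), here is a minimal usage sketch. It assumes an already-built RestHighLevelClient and a made-up job id; imports and client construction are omitted.

static void deleteJobExample(RestHighLevelClient client) throws IOException {
    DeleteJobRequest request = new DeleteJobRequest("my-job");   // hypothetical job id
    request.setWaitForCompletion(false);                         // don't block; ask for a task id instead

    DeleteJobResponse response = client.machineLearning().deleteJob(request, RequestOptions.DEFAULT);
    if (response.getAcknowledged() != null) {
        System.out.println("deleted: " + response.getAcknowledged());
    } else {
        // With waitForCompletion=false the deletion runs asynchronously and only the task id comes back.
        System.out.println("deletion task: " + response.getTask());
    }
}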
@ -0,0 +1,113 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.tasks.TaskId;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Response object that contains the acknowledgement or the task id
|
||||
* depending on whether the delete job action was requested to wait for completion.
|
||||
*/
|
||||
public class DeleteJobResponse extends ActionResponse implements ToXContentObject {
|
||||
|
||||
private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
|
||||
private static final ParseField TASK = new ParseField("task");
|
||||
|
||||
public static final ConstructingObjectParser<DeleteJobResponse, Void> PARSER = new ConstructingObjectParser<>("delete_job_response",
|
||||
true, a-> new DeleteJobResponse((Boolean) a[0], (TaskId) a[1]));
|
||||
|
||||
static {
|
||||
PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ACKNOWLEDGED);
|
||||
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), TaskId.parser(), TASK, ObjectParser.ValueType.STRING);
|
||||
}
|
||||
|
||||
public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
private final Boolean acknowledged;
|
||||
private final TaskId task;
|
||||
|
||||
DeleteJobResponse(@Nullable Boolean acknowledged, @Nullable TaskId task) {
|
||||
assert acknowledged != null || task != null;
|
||||
this.acknowledged = acknowledged;
|
||||
this.task = task;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the action acknowledgement
|
||||
* @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code false} or
|
||||
* otherwise a {@code boolean} that indicates whether the job was deleted successfully.
|
||||
*/
|
||||
public Boolean getAcknowledged() {
|
||||
return acknowledged;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the task id
|
||||
* @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code true} or
|
||||
* otherwise the id of the job deletion task.
|
||||
*/
|
||||
public TaskId getTask() {
|
||||
return task;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(acknowledged, task);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
DeleteJobResponse that = (DeleteJobResponse) other;
|
||||
return Objects.equals(acknowledged, that.acknowledged) && Objects.equals(task, that.task);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
if (acknowledged != null) {
|
||||
builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged);
|
||||
}
|
||||
if (task != null) {
|
||||
builder.field(TASK.getPreferredName(), task.toString());
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,147 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Request object to get {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} by their respective datafeedIds
|
||||
*
|
||||
* {@code _all} explicitly gets all the datafeeds' statistics in the cluster
|
||||
* An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds' statistics in the cluster
|
||||
*/
|
||||
public class GetDatafeedStatsRequest extends ActionRequest implements ToXContentObject {
|
||||
|
||||
public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static final ConstructingObjectParser<GetDatafeedStatsRequest, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"get_datafeed_stats_request", a -> new GetDatafeedStatsRequest((List<String>) a[0]));
|
||||
|
||||
static {
|
||||
PARSER.declareField(ConstructingObjectParser.constructorArg(),
|
||||
p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
|
||||
DatafeedConfig.ID, ObjectParser.ValueType.STRING_ARRAY);
|
||||
PARSER.declareBoolean(GetDatafeedStatsRequest::setAllowNoDatafeeds, ALLOW_NO_DATAFEEDS);
|
||||
}
|
||||
|
||||
private static final String ALL_DATAFEEDS = "_all";
|
||||
|
||||
private final List<String> datafeedIds;
|
||||
private Boolean allowNoDatafeeds;
|
||||
|
||||
/**
|
||||
* Explicitly gets all datafeeds statistics
|
||||
*
|
||||
* @return a {@link GetDatafeedStatsRequest} for all existing datafeeds
|
||||
*/
|
||||
public static GetDatafeedStatsRequest getAllDatafeedStatsRequest(){
|
||||
return new GetDatafeedStatsRequest(ALL_DATAFEEDS);
|
||||
}
|
||||
|
||||
GetDatafeedStatsRequest(List<String> datafeedIds) {
|
||||
if (datafeedIds.stream().anyMatch(Objects::isNull)) {
|
||||
throw new NullPointerException("datafeedIds must not contain null values");
|
||||
}
|
||||
this.datafeedIds = new ArrayList<>(datafeedIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the specified Datafeed's statistics via their unique datafeedIds
|
||||
*
|
||||
* @param datafeedIds must be non-null and each datafeedId must be non-null
|
||||
*/
|
||||
public GetDatafeedStatsRequest(String... datafeedIds) {
|
||||
this(Arrays.asList(datafeedIds));
|
||||
}
|
||||
|
||||
/**
|
||||
* All the datafeedIds for which to get statistics
|
||||
*/
|
||||
public List<String> getDatafeedIds() {
|
||||
return datafeedIds;
|
||||
}
|
||||
|
||||
public Boolean isAllowNoDatafeeds() {
|
||||
return this.allowNoDatafeeds;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether to ignore if a wildcard expression matches no datafeeds.
|
||||
*
|
||||
* This includes {@code _all} string or when no datafeeds have been specified
|
||||
*
|
||||
* @param allowNoDatafeeds When {@code true} ignore if wildcard or {@code _all} matches no datafeeds. Defaults to {@code true}
|
||||
*/
|
||||
public void setAllowNoDatafeeds(boolean allowNoDatafeeds) {
|
||||
this.allowNoDatafeeds = allowNoDatafeeds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(datafeedIds, allowNoDatafeeds);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
GetDatafeedStatsRequest that = (GetDatafeedStatsRequest) other;
|
||||
return Objects.equals(datafeedIds, that.datafeedIds) &&
|
||||
Objects.equals(allowNoDatafeeds, that.allowNoDatafeeds);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds));
|
||||
if (allowNoDatafeeds != null) {
|
||||
builder.field(ALLOW_NO_DATAFEEDS.getPreferredName(), allowNoDatafeeds);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,89 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
/**
|
||||
* Contains a {@link List} of the found {@link DatafeedStats} objects and the total count found
|
||||
*/
|
||||
public class GetDatafeedStatsResponse extends AbstractResultResponse<DatafeedStats> {
|
||||
|
||||
public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static final ConstructingObjectParser<GetDatafeedStatsResponse, Void> PARSER =
|
||||
new ConstructingObjectParser<>("get_datafeed_stats_response",
|
||||
true,
|
||||
a -> new GetDatafeedStatsResponse((List<DatafeedStats>) a[0], (long) a[1]));
|
||||
|
||||
static {
|
||||
PARSER.declareObjectArray(constructorArg(), DatafeedStats.PARSER, RESULTS_FIELD);
|
||||
PARSER.declareLong(constructorArg(), COUNT);
|
||||
}
|
||||
|
||||
GetDatafeedStatsResponse(List<DatafeedStats> results, long count) {
|
||||
super(RESULTS_FIELD, results, count);
|
||||
}
|
||||
|
||||
/**
|
||||
* The collection of {@link DatafeedStats} objects found in the query
|
||||
*/
|
||||
public List<DatafeedStats> datafeedStats() {
|
||||
return results;
|
||||
}
|
||||
|
||||
public static GetDatafeedStatsResponse fromXContent(XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(results, count);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
GetDatafeedStatsResponse other = (GetDatafeedStatsResponse) obj;
|
||||
return Objects.equals(results, other.results) && count == other.count;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String toString() {
|
||||
return Strings.toString(this);
|
||||
}
|
||||
}
|
|

@ -0,0 +1,100 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

/**
 * Request to preview a MachineLearning Datafeed
 */
public class PreviewDatafeedRequest extends ActionRequest implements ToXContentObject {

    public static final ConstructingObjectParser<PreviewDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>(
        "open_datafeed_request", true, a -> new PreviewDatafeedRequest((String) a[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID);
    }

    public static PreviewDatafeedRequest fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    private final String datafeedId;

    /**
     * Create a new request with the desired datafeedId
     *
     * @param datafeedId unique datafeedId, must not be null
     */
    public PreviewDatafeedRequest(String datafeedId) {
        this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null");
    }

    public String getDatafeedId() {
        return datafeedId;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
        builder.endObject();
        return builder;
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    @Override
    public int hashCode() {
        return Objects.hash(datafeedId);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        PreviewDatafeedRequest that = (PreviewDatafeedRequest) other;
        return Objects.equals(datafeedId, that.datafeedId);
    }
}

@ -0,0 +1,113 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * Response containing a datafeed preview in JSON format
 */
public class PreviewDatafeedResponse extends ActionResponse implements ToXContentObject {

    private BytesReference preview;

    public static PreviewDatafeedResponse fromXContent(XContentParser parser) throws IOException {
        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            parser.nextToken();
            builder.copyCurrentStructure(parser);
            return new PreviewDatafeedResponse(BytesReference.bytes(builder));
        }
    }

    public PreviewDatafeedResponse(BytesReference preview) {
        this.preview = preview;
    }

    public BytesReference getPreview() {
        return preview;
    }

    /**
     * Parses the preview to a list of {@link Map} objects
     * @return List of previewed data
     * @throws IOException If there is a parsing issue with the {@link BytesReference}
     * @throws java.lang.ClassCastException If casting the raw {@link Object} entries to a {@link Map} fails
     */
    @SuppressWarnings("unchecked")
    public List<Map<String, Object>> getDataList() throws IOException {
        try (StreamInput streamInput = preview.streamInput();
             XContentParser parser = XContentType.JSON.xContent()
                 .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, streamInput)) {
            XContentParser.Token token = parser.nextToken();
            if (token == XContentParser.Token.START_ARRAY) {
                return parser.listOrderedMap().stream().map(obj -> (Map<String, Object>) obj).collect(Collectors.toList());
            } else {
                return Collections.singletonList(parser.mapOrdered());
            }
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        try (InputStream stream = preview.streamInput()) {
            builder.rawValue(stream, XContentType.JSON);
        }
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(preview);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        PreviewDatafeedResponse other = (PreviewDatafeedResponse) obj;
        return Objects.equals(preview, other.preview);
    }

    @Override
    public final String toString() {
        return Strings.toString(this);
    }
}
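A brief sketch of consuming the preview returned above; `response` is assumed to be a PreviewDatafeedResponse obtained via MachineLearningClient#previewDatafeed, as exercised in the MachineLearningIT test later in this commit:

    // Each preview row is exposed as an ordered Map of field name to value
    for (Map<String, Object> row : response.getDataList()) {
        Object total = row.get("total"); // field names depend on the datafeed being previewed
    }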

@ -0,0 +1,42 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml.datafeed;

import org.elasticsearch.common.ParseField;

import java.util.Locale;

/**
 * Datafeed State POJO
 */
public enum DatafeedState {

    STARTED, STOPPED, STARTING, STOPPING;

    public static final ParseField STATE = new ParseField("state");

    public static DatafeedState fromString(String name) {
        return valueOf(name.trim().toUpperCase(Locale.ROOT));
    }

    @Override
    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }
}

@ -0,0 +1,136 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml.datafeed;

import org.elasticsearch.client.ml.NodeAttributes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

/**
 * Datafeed Statistics POJO
 */
public class DatafeedStats implements ToXContentObject {

    private final String datafeedId;
    private final DatafeedState datafeedState;
    @Nullable
    private final NodeAttributes node;
    @Nullable
    private final String assignmentExplanation;

    public static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
    public static final ParseField NODE = new ParseField("node");

    public static final ConstructingObjectParser<DatafeedStats, Void> PARSER = new ConstructingObjectParser<>("datafeed_stats",
        true,
        a -> {
            String datafeedId = (String) a[0];
            DatafeedState datafeedState = DatafeedState.fromString((String) a[1]);
            NodeAttributes nodeAttributes = (NodeAttributes) a[2];
            String assignmentExplanation = (String) a[3];
            return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentExplanation);
        });

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedState.STATE);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE);
        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
    }

    public DatafeedStats(String datafeedId, DatafeedState datafeedState, @Nullable NodeAttributes node,
                         @Nullable String assignmentExplanation) {
        this.datafeedId = Objects.requireNonNull(datafeedId);
        this.datafeedState = Objects.requireNonNull(datafeedState);
        this.node = node;
        this.assignmentExplanation = assignmentExplanation;
    }

    public String getDatafeedId() {
        return datafeedId;
    }

    public DatafeedState getDatafeedState() {
        return datafeedState;
    }

    public NodeAttributes getNode() {
        return node;
    }

    public String getAssignmentExplanation() {
        return assignmentExplanation;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
        builder.field(DatafeedState.STATE.getPreferredName(), datafeedState.toString());
        if (node != null) {
            builder.startObject("node");
            builder.field("id", node.getId());
            builder.field("name", node.getName());
            builder.field("ephemeral_id", node.getEphemeralId());
            builder.field("transport_address", node.getTransportAddress());

            builder.startObject("attributes");
            for (Map.Entry<String, String> entry : node.getAttributes().entrySet()) {
                if (entry.getKey().startsWith("ml.")) {
                    builder.field(entry.getKey(), entry.getValue());
                }
            }
            builder.endObject();
            builder.endObject();
        }
        if (assignmentExplanation != null) {
            builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(datafeedId, datafeedState.toString(), node, assignmentExplanation);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        DatafeedStats other = (DatafeedStats) obj;
        return Objects.equals(datafeedId, other.datafeedId) &&
            Objects.equals(this.datafeedState, other.datafeedState) &&
            Objects.equals(this.node, other.node) &&
            Objects.equals(this.assignmentExplanation, other.assignmentExplanation);
    }
}

@ -215,7 +215,7 @@ public class Detector implements ToXContentObject {
    }

    /**
     * Excludes frequently-occuring metrics from the analysis;
     * Excludes frequently-occurring metrics from the analysis;
     * can apply to 'by' field, 'over' field, or both
     *
     * @return the value that the user set

@ -57,7 +57,6 @@ public class Job implements ToXContentObject {
    public static final ParseField DATA_DESCRIPTION = new ParseField("data_description");
    public static final ParseField DESCRIPTION = new ParseField("description");
    public static final ParseField FINISHED_TIME = new ParseField("finished_time");
    public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time");
    public static final ParseField ESTABLISHED_MODEL_MEMORY = new ParseField("established_model_memory");
    public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config");
    public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days");
@ -66,6 +65,7 @@ public class Job implements ToXContentObject {
    public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days");
    public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id");
    public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name");
    public static final ParseField DELETING = new ParseField("deleting");

    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("job_details", true, Builder::new);

@ -82,10 +82,6 @@ public class Job implements ToXContentObject {
            (p) -> TimeUtil.parseTimeField(p, FINISHED_TIME.getPreferredName()),
            FINISHED_TIME,
            ValueType.VALUE);
        PARSER.declareField(Builder::setLastDataTime,
            (p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()),
            LAST_DATA_TIME,
            ValueType.VALUE);
        PARSER.declareLong(Builder::setEstablishedModelMemory, ESTABLISHED_MODEL_MEMORY);
        PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG);
        PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS);
@ -99,6 +95,7 @@ public class Job implements ToXContentObject {
        PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT);
        PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID);
        PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME);
        PARSER.declareBoolean(Builder::setDeleting, DELETING);
    }

    private final String jobId;
@ -108,7 +105,6 @@ public class Job implements ToXContentObject {
    private final String description;
    private final Date createTime;
    private final Date finishedTime;
    private final Date lastDataTime;
    private final Long establishedModelMemory;
    private final AnalysisConfig analysisConfig;
    private final AnalysisLimits analysisLimits;
@ -121,13 +117,14 @@ public class Job implements ToXContentObject {
    private final Map<String, Object> customSettings;
    private final String modelSnapshotId;
    private final String resultsIndexName;
    private final Boolean deleting;

    private Job(String jobId, String jobType, List<String> groups, String description, Date createTime,
                Date finishedTime, Date lastDataTime, Long establishedModelMemory,
    private Job(String jobId, String jobType, List<String> groups, String description,
                Date createTime, Date finishedTime, Long establishedModelMemory,
                AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
                ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval,
                Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map<String, Object> customSettings,
                String modelSnapshotId, String resultsIndexName) {
                String modelSnapshotId, String resultsIndexName, Boolean deleting) {

        this.jobId = jobId;
        this.jobType = jobType;
@ -135,7 +132,6 @@ public class Job implements ToXContentObject {
        this.description = description;
        this.createTime = createTime;
        this.finishedTime = finishedTime;
        this.lastDataTime = lastDataTime;
        this.establishedModelMemory = establishedModelMemory;
        this.analysisConfig = analysisConfig;
        this.analysisLimits = analysisLimits;
@ -148,6 +144,7 @@ public class Job implements ToXContentObject {
        this.customSettings = customSettings == null ? null : Collections.unmodifiableMap(customSettings);
        this.modelSnapshotId = modelSnapshotId;
        this.resultsIndexName = resultsIndexName;
        this.deleting = deleting;
    }

    /**
@ -205,16 +202,6 @@ public class Job implements ToXContentObject {
        return finishedTime;
    }

    /**
     * The last time data was uploaded to the job or <code>null</code> if no
     * data has been seen.
     *
     * @return The date at which the last data was processed
     */
    public Date getLastDataTime() {
        return lastDataTime;
    }

    /**
     * The established model memory of the job, or <code>null</code> if model
     * memory has not reached equilibrium yet.
@ -292,6 +279,10 @@ public class Job implements ToXContentObject {
        return modelSnapshotId;
    }

    public Boolean getDeleting() {
        return deleting;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
@ -313,10 +304,6 @@ public class Job implements ToXContentObject {
            builder.timeField(FINISHED_TIME.getPreferredName(), FINISHED_TIME.getPreferredName() + humanReadableSuffix,
                finishedTime.getTime());
        }
        if (lastDataTime != null) {
            builder.timeField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + humanReadableSuffix,
                lastDataTime.getTime());
        }
        if (establishedModelMemory != null) {
            builder.field(ESTABLISHED_MODEL_MEMORY.getPreferredName(), establishedModelMemory);
        }
@ -351,6 +338,9 @@ public class Job implements ToXContentObject {
        if (resultsIndexName != null) {
            builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName);
        }
        if (deleting != null) {
            builder.field(DELETING.getPreferredName(), deleting);
        }
        builder.endObject();
        return builder;
    }
@ -372,7 +362,6 @@ public class Job implements ToXContentObject {
            && Objects.equals(this.description, that.description)
            && Objects.equals(this.createTime, that.createTime)
            && Objects.equals(this.finishedTime, that.finishedTime)
            && Objects.equals(this.lastDataTime, that.lastDataTime)
            && Objects.equals(this.establishedModelMemory, that.establishedModelMemory)
            && Objects.equals(this.analysisConfig, that.analysisConfig)
            && Objects.equals(this.analysisLimits, that.analysisLimits)
@ -384,15 +373,16 @@ public class Job implements ToXContentObject {
            && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays)
            && Objects.equals(this.customSettings, that.customSettings)
            && Objects.equals(this.modelSnapshotId, that.modelSnapshotId)
            && Objects.equals(this.resultsIndexName, that.resultsIndexName);
            && Objects.equals(this.resultsIndexName, that.resultsIndexName)
            && Objects.equals(this.deleting, that.deleting);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory,
        return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, establishedModelMemory,
            analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
            backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
            modelSnapshotId, resultsIndexName);
            modelSnapshotId, resultsIndexName, deleting);
    }

    @Override
@ -415,7 +405,6 @@ public class Job implements ToXContentObject {
        private DataDescription dataDescription;
        private Date createTime;
        private Date finishedTime;
        private Date lastDataTime;
        private Long establishedModelMemory;
        private ModelPlotConfig modelPlotConfig;
        private Long renormalizationWindowDays;
@ -425,6 +414,7 @@ public class Job implements ToXContentObject {
        private Map<String, Object> customSettings;
        private String modelSnapshotId;
        private String resultsIndexName;
        private Boolean deleting;

        private Builder() {
        }
@ -443,7 +433,6 @@ public class Job implements ToXContentObject {
            this.dataDescription = job.getDataDescription();
            this.createTime = job.getCreateTime();
            this.finishedTime = job.getFinishedTime();
            this.lastDataTime = job.getLastDataTime();
            this.establishedModelMemory = job.getEstablishedModelMemory();
            this.modelPlotConfig = job.getModelPlotConfig();
            this.renormalizationWindowDays = job.getRenormalizationWindowDays();
@ -453,6 +442,7 @@ public class Job implements ToXContentObject {
            this.customSettings = job.getCustomSettings();
            this.modelSnapshotId = job.getModelSnapshotId();
            this.resultsIndexName = job.getResultsIndexNameNoPrefix();
            this.deleting = job.getDeleting();
        }

        public Builder setId(String id) {
@ -504,16 +494,6 @@ public class Job implements ToXContentObject {
            return this;
        }

        /**
         * Set the wall clock time of the last data upload
         *
         * @param lastDataTime Wall clock time
         */
        public Builder setLastDataTime(Date lastDataTime) {
            this.lastDataTime = lastDataTime;
            return this;
        }

        public Builder setEstablishedModelMemory(Long establishedModelMemory) {
            this.establishedModelMemory = establishedModelMemory;
            return this;
@ -559,6 +539,11 @@ public class Job implements ToXContentObject {
            return this;
        }

        Builder setDeleting(Boolean deleting) {
            this.deleting = deleting;
            return this;
        }

        /**
         * Builds a job.
         *
@ -568,10 +553,10 @@ public class Job implements ToXContentObject {
            Objects.requireNonNull(id, "[" + ID.getPreferredName() + "] must not be null");
            Objects.requireNonNull(jobType, "[" + JOB_TYPE.getPreferredName() + "] must not be null");
            return new Job(
                id, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory,
                id, jobType, groups, description, createTime, finishedTime, establishedModelMemory,
                analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
                backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
                modelSnapshotId, resultsIndexName);
                modelSnapshotId, resultsIndexName, deleting);
        }
    }
}

@ -48,6 +48,7 @@ public class AnomalyRecord implements ToXContentObject {
     * Result fields (all detector types)
     */
    public static final ParseField PROBABILITY = new ParseField("probability");
    public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact");
    public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
    public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
    public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
@ -94,6 +95,7 @@ public class AnomalyRecord implements ToXContentObject {
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
        PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
        PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
        PARSER.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT);
        PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
        PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);
        PARSER.declareInt(AnomalyRecord::setDetectorIndex, DETECTOR_INDEX);
@ -117,6 +119,7 @@ public class AnomalyRecord implements ToXContentObject {
    private final String jobId;
    private int detectorIndex;
    private double probability;
    private Double multiBucketImpact;
    private String byFieldName;
    private String byFieldValue;
    private String correlatedByFieldValue;
@ -155,6 +158,9 @@ public class AnomalyRecord implements ToXContentObject {
        builder.field(Job.ID.getPreferredName(), jobId);
        builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
        builder.field(PROBABILITY.getPreferredName(), probability);
        if (multiBucketImpact != null) {
            builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact);
        }
        builder.field(RECORD_SCORE.getPreferredName(), recordScore);
        builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);
        builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
@ -254,6 +260,14 @@ public class AnomalyRecord implements ToXContentObject {
        probability = value;
    }

    public double getMultiBucketImpact() {
        return multiBucketImpact;
    }

    void setMultiBucketImpact(double value) {
        multiBucketImpact = value;
    }

    public String getByFieldName() {
        return byFieldName;
    }
@ -376,7 +390,7 @@ public class AnomalyRecord implements ToXContentObject {

    @Override
    public int hashCode() {
        return Objects.hash(jobId, detectorIndex, bucketSpan, probability, recordScore,
        return Objects.hash(jobId, detectorIndex, bucketSpan, probability, multiBucketImpact, recordScore,
            initialRecordScore, typical, actual, function, functionDescription, fieldName,
            byFieldName, byFieldValue, correlatedByFieldValue, partitionFieldName,
            partitionFieldValue, overFieldName, overFieldValue, timestamp, isInterim,
@ -399,6 +413,7 @@ public class AnomalyRecord implements ToXContentObject {
            && this.detectorIndex == that.detectorIndex
            && this.bucketSpan == that.bucketSpan
            && this.probability == that.probability
            && Objects.equals(this.multiBucketImpact, that.multiBucketImpact)
            && this.recordScore == that.recordScore
            && this.initialRecordScore == that.initialRecordScore
            && Objects.deepEquals(this.typical, that.typical)

@ -0,0 +1,61 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.watcher;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;

import java.util.Objects;

/**
 * A request to explicitly activate a watch.
 */
public final class ActivateWatchRequest implements Validatable {

    private final String watchId;

    public ActivateWatchRequest(String watchId) {
        this.watchId = Objects.requireNonNull(watchId, "Watch identifier is required");
        if (PutWatchRequest.isValidId(this.watchId) == false) {
            throw new IllegalArgumentException("Watch identifier contains whitespace");
        }
    }

    /**
     * @return The ID of the watch to be activated.
     */
    public String getWatchId() {
        return watchId;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ActivateWatchRequest that = (ActivateWatchRequest) o;
        return Objects.equals(watchId, that.watchId);
    }

    @Override
    public int hashCode() {
        int result = Objects.hash(watchId);
        return result;
    }
}

@ -0,0 +1,71 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.watcher;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

/**
 * Response from an 'activate watch' request.
 */
public final class ActivateWatchResponse {

    private static final ParseField STATUS_FIELD = new ParseField("status");
    private static ConstructingObjectParser<ActivateWatchResponse, Void> PARSER =
        new ConstructingObjectParser<>("activate_watch_response", true,
            a -> new ActivateWatchResponse((WatchStatus) a[0]));

    static {
        PARSER.declareObject(ConstructingObjectParser.constructorArg(),
            (parser, context) -> WatchStatus.parse(parser),
            STATUS_FIELD);
    }

    private final WatchStatus status;

    public ActivateWatchResponse(WatchStatus status) {
        this.status = status;
    }

    public WatchStatus getStatus() {
        return status;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ActivateWatchResponse that = (ActivateWatchResponse) o;
        return Objects.equals(status, that.status);
    }

    @Override
    public int hashCode() {
        return Objects.hash(status);
    }

    public static ActivateWatchResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
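A minimal usage sketch for the two watcher classes above, mirroring the WatcherIT test added later in this commit; the `client` variable is assumed to be a RestHighLevelClient and the watch id is illustrative:

    ActivateWatchRequest request = new ActivateWatchRequest("my_watch_id");
    ActivateWatchResponse response = client.watcher().activateWatch(request, RequestOptions.DEFAULT);
    boolean active = response.getStatus().state().isActive(); // true once the watch is activated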

@ -78,6 +78,16 @@ public class IngestClientIT extends ESRestHighLevelClientTestCase {
        assertEquals(expectedConfig.getConfigAsMap(), response.pipelines().get(0).getConfigAsMap());
    }

    public void testGetNonexistentPipeline() throws IOException {
        String id = "nonexistent_pipeline_id";

        GetPipelineRequest request = new GetPipelineRequest(id);

        GetPipelineResponse response =
            execute(request, highLevelClient().ingest()::getPipeline, highLevelClient().ingest()::getPipelineAsync);
        assertFalse(response.isFound());
    }

    public void testDeletePipeline() throws IOException {
        String id = "some_pipeline_id";
        {

@ -34,6 +34,7 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobStatsRequest;
@ -41,6 +42,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutJobRequest;
@ -162,11 +164,18 @@ public class MLRequestConvertersTests extends ESTestCase {
        Request request = MLRequestConverters.deleteJob(deleteJobRequest);
        assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint());
        assertEquals(Boolean.toString(false), request.getParameters().get("force"));
        assertNull(request.getParameters().get("force"));
        assertNull(request.getParameters().get("wait_for_completion"));

        deleteJobRequest = new DeleteJobRequest(jobId);
        deleteJobRequest.setForce(true);
        request = MLRequestConverters.deleteJob(deleteJobRequest);
        assertEquals(Boolean.toString(true), request.getParameters().get("force"));

        deleteJobRequest = new DeleteJobRequest(jobId);
        deleteJobRequest.setWaitForCompletion(false);
        request = MLRequestConverters.deleteJob(deleteJobRequest);
        assertEquals(Boolean.toString(false), request.getParameters().get("wait_for_completion"));
    }

    public void testFlushJob() throws Exception {
@ -293,6 +302,30 @@ public class MLRequestConvertersTests extends ESTestCase {
        }
    }

    public void testGetDatafeedStats() {
        GetDatafeedStatsRequest getDatafeedStatsRequestRequest = new GetDatafeedStatsRequest();

        Request request = MLRequestConverters.getDatafeedStats(getDatafeedStatsRequestRequest);

        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/ml/datafeeds/_stats", request.getEndpoint());
        assertFalse(request.getParameters().containsKey("allow_no_datafeeds"));

        getDatafeedStatsRequestRequest = new GetDatafeedStatsRequest("datafeed1", "datafeeds*");
        getDatafeedStatsRequestRequest.setAllowNoDatafeeds(true);
        request = MLRequestConverters.getDatafeedStats(getDatafeedStatsRequestRequest);

        assertEquals("/_xpack/ml/datafeeds/datafeed1,datafeeds*/_stats", request.getEndpoint());
        assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_datafeeds"));
    }

    public void testPreviewDatafeed() {
        PreviewDatafeedRequest datafeedRequest = new PreviewDatafeedRequest("datafeed_1");
        Request request = MLRequestConverters.previewDatafeed(datafeedRequest);
        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/ml/datafeeds/" + datafeedRequest.getDatafeedId() + "/_preview", request.getEndpoint());
    }

    public void testDeleteForecast() {
        String jobId = randomAlphaOfLength(10);
        DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest(jobId);

@ -33,6 +33,7 @@ import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
@ -41,6 +42,8 @@ import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
@ -49,6 +52,8 @@ import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
@ -63,6 +68,8 @@ import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.calendars.CalendarTests;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedState;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.DataDescription;
import org.elasticsearch.client.ml.job.config.Detector;
@ -76,8 +83,11 @@ import org.elasticsearch.rest.RestStatus;
import org.junit.After;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@ -142,17 +152,33 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
        assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
    }

    public void testDeleteJob() throws Exception {
    public void testDeleteJob_GivenWaitForCompletionIsTrue() throws Exception {
        String jobId = randomValidJobId();
        Job job = buildJob(jobId);
        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
        machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

        AcknowledgedResponse response = execute(new DeleteJobRequest(jobId),
        DeleteJobResponse response = execute(new DeleteJobRequest(jobId),
            machineLearningClient::deleteJob,
            machineLearningClient::deleteJobAsync);

        assertTrue(response.isAcknowledged());
        assertTrue(response.getAcknowledged());
        assertNull(response.getTask());
    }

    public void testDeleteJob_GivenWaitForCompletionIsFalse() throws Exception {
        String jobId = randomValidJobId();
        Job job = buildJob(jobId);
        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
        machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

        DeleteJobRequest deleteJobRequest = new DeleteJobRequest(jobId);
        deleteJobRequest.setWaitForCompletion(false);

        DeleteJobResponse response = execute(deleteJobRequest, machineLearningClient::deleteJob, machineLearningClient::deleteJobAsync);

        assertNull(response.getAcknowledged());
        assertNotNull(response.getTask());
    }

    public void testOpenJob() throws Exception {
@ -564,6 +590,126 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
        }
    }

    public void testGetDatafeedStats() throws Exception {
        String jobId1 = "ml-get-datafeed-stats-test-id-1";
        String jobId2 = "ml-get-datafeed-stats-test-id-2";
        String indexName = "datafeed_stats_data_1";

        // Set up the index
        CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
        createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long");
        highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);

        // create the job and the datafeed
        Job job1 = buildJob(jobId1);
        putJob(job1);
        openJob(job1);

        Job job2 = buildJob(jobId2);
        putJob(job2);

        String datafeedId1 = createAndPutDatafeed(jobId1, indexName);
        String datafeedId2 = createAndPutDatafeed(jobId2, indexName);

        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();

        machineLearningClient.startDatafeed(new StartDatafeedRequest(datafeedId1), RequestOptions.DEFAULT);

        GetDatafeedStatsRequest request = new GetDatafeedStatsRequest(datafeedId1);

        // Test getting specific
        GetDatafeedStatsResponse response =
            execute(request, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync);

        assertEquals(1, response.count());
        assertThat(response.datafeedStats(), hasSize(1));
        assertThat(response.datafeedStats().get(0).getDatafeedId(), equalTo(datafeedId1));
        assertThat(response.datafeedStats().get(0).getDatafeedState().toString(), equalTo(DatafeedState.STARTED.toString()));

        // Test getting all explicitly
        request = GetDatafeedStatsRequest.getAllDatafeedStatsRequest();
        response = execute(request, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync);

        assertTrue(response.count() >= 2L);
        assertTrue(response.datafeedStats().size() >= 2L);
        assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()),
            hasItems(datafeedId1, datafeedId2));

        // Test getting all implicitly
        response =
            execute(new GetDatafeedStatsRequest(), machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync);

        assertTrue(response.count() >= 2L);
        assertTrue(response.datafeedStats().size() >= 2L);
        assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()),
            hasItems(datafeedId1, datafeedId2));

        // Test getting all with wildcard
        request = new GetDatafeedStatsRequest("ml-get-datafeed-stats-test-id-*");
        response = execute(request, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync);
        assertEquals(2L, response.count());
        assertThat(response.datafeedStats(), hasSize(2));
        assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()),
            hasItems(datafeedId1, datafeedId2));

        // Test when allow_no_jobs is false
        final GetDatafeedStatsRequest erroredRequest = new GetDatafeedStatsRequest("datafeeds-that-do-not-exist*");
        erroredRequest.setAllowNoDatafeeds(false);
        ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
            () -> execute(erroredRequest, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync));
        assertThat(exception.status().getStatus(), equalTo(404));
    }

    public void testPreviewDatafeed() throws Exception {
        String jobId = "test-preview-datafeed";
        String indexName = "preview_data_1";

        // Set up the index and docs
        CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
        createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long");
        highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
        BulkRequest bulk = new BulkRequest();
        bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        long now = (System.currentTimeMillis() / 1000) * 1000;
        long thePast = now - 60000;
        int i = 0;
        List<Integer> totalTotals = new ArrayList<>(60);
        while (thePast < now) {
            Integer total = randomInt(1000);
            IndexRequest doc = new IndexRequest();
            doc.index(indexName);
            doc.type("doc");
            doc.id("id" + i);
            doc.source("{\"total\":" + total + ",\"timestamp\":" + thePast + "}", XContentType.JSON);
            bulk.add(doc);
            thePast += 1000;
            i++;
            totalTotals.add(total);
        }
        highLevelClient().bulk(bulk, RequestOptions.DEFAULT);

        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
        // create the job and the datafeed
        Job job = buildJob(jobId);
        putJob(job);
        openJob(job);

        String datafeedId = jobId + "-feed";
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId)
            .setIndices(indexName)
            .setQueryDelay(TimeValue.timeValueSeconds(1))
            .setTypes(Collections.singletonList("doc"))
            .setFrequency(TimeValue.timeValueSeconds(1)).build();
        machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);

        PreviewDatafeedResponse response = execute(new PreviewDatafeedRequest(datafeedId),
            machineLearningClient::previewDatafeed,
            machineLearningClient::previewDatafeedAsync);

        Integer[] totals = response.getDataList().stream().map(map -> (Integer) map.get("total")).toArray(Integer[]::new);
        assertThat(totalTotals, containsInAnyOrder(totals));
    }

    public void testDeleteForecast() throws Exception {
        String jobId = "test-delete-forecast";


@ -19,6 +19,8 @@
package org.elasticsearch.client;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchResponse;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.client.watcher.AckWatchResponse;
import org.elasticsearch.client.watcher.ActionStatus;
@ -33,6 +35,7 @@ import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
import org.elasticsearch.rest.RestStatus;

import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;

public class WatcherIT extends ESRestHighLevelClientTestCase {

@ -108,4 +111,26 @@ public class WatcherIT extends ESRestHighLevelClientTestCase {
            new AckWatchRequest("nonexistent"), RequestOptions.DEFAULT));
        assertEquals(RestStatus.NOT_FOUND, exception.status());
    }

    public void testActivateWatchThatExists() throws Exception {
        String watchId = randomAlphaOfLength(10);
        createWatch(watchId);
        ActivateWatchResponse activateWatchResponse1 = highLevelClient().watcher().activateWatch(new ActivateWatchRequest(watchId),
            RequestOptions.DEFAULT);
        assertThat(activateWatchResponse1.getStatus().state().isActive(), is(true));

        ActivateWatchResponse activateWatchResponse2 = highLevelClient().watcher().activateWatch(new ActivateWatchRequest(watchId),
            RequestOptions.DEFAULT);
        assertThat(activateWatchResponse2.getStatus().state().isActive(), is(true));
        assertThat(activateWatchResponse1.getStatus().state().getTimestamp(),
            lessThan(activateWatchResponse2.getStatus().state().getTimestamp()));
    }

    public void testActivateWatchThatDoesNotExist() throws Exception {
        String watchId = randomAlphaOfLength(10);
        // an exception is expected when activating a watch that does not exist
        ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () ->
            highLevelClient().watcher().activateWatch(new ActivateWatchRequest(watchId), RequestOptions.DEFAULT));
        assertEquals(RestStatus.NOT_FOUND, exception.status());
    }
}

@ -21,6 +21,7 @@ package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
@ -97,4 +98,14 @@ public class WatcherRequestConvertersTests extends ESTestCase {
        assertEquals(expectedEndpoint.toString(), request.getEndpoint());
        assertThat(request.getEntity(), nullValue());
    }

    public void testActivateWatchRequestConversion() {
        String watchId = randomAlphaOfLength(10);
        ActivateWatchRequest activateWatchRequest = new ActivateWatchRequest(watchId);

        Request request = WatcherRequestConverters.activateWatch(activateWatchRequest);
        assertEquals(HttpPut.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/watcher/watch/" + watchId + "/_activate", request.getEndpoint());
        assertThat(request.getEntity(), nullValue());
    }
}

@ -176,7 +176,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
                // <3>
            }
            if (shardInfo.getFailed() > 0) {
                for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
                for (ReplicationResponse.ShardInfo.Failure failure :
                        shardInfo.getFailures()) {
                    String reason = failure.reason(); // <4>
                }
            }
@ -239,8 +240,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
        }
        {
            IndexRequest request = new IndexRequest("posts", "doc", "async").source("field", "value");
            ActionListener<IndexResponse> listener;
            // tag::index-execute-listener
            ActionListener<IndexResponse> listener = new ActionListener<IndexResponse>() {
            listener = new ActionListener<IndexResponse>() {
                @Override
                public void onResponse(IndexResponse indexResponse) {
                    // <1>
@ -305,8 +307,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {

        request = new UpdateRequest("posts", "doc", "1").fetchSource(true);
        //tag::update-request-with-stored-script
        Script stored =
            new Script(ScriptType.STORED, null, "increment-field", parameters); // <1>
        Script stored = new Script(
            ScriptType.STORED, null, "increment-field", parameters); // <1>
        request.script(stored); // <2>
        //end::update-request-with-stored-script
        updateResponse = client.update(request, RequestOptions.DEFAULT);
@ -359,7 +361,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
        //end::update-request-with-doc-as-string
        request.fetchSource(true);
        // tag::update-execute
        UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
        UpdateResponse updateResponse = client.update(
            request, RequestOptions.DEFAULT);
        // end::update-execute
        assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());

@ -397,7 +400,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
            // <1>
        }
        if (shardInfo.getFailed() > 0) {
            for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
            for (ReplicationResponse.ShardInfo.Failure failure :
                    shardInfo.getFailures()) {
                String reason = failure.reason(); // <2>
            }
        }
@ -408,7 +412,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
            UpdateRequest request = new UpdateRequest("posts", "type", "does_not_exist")
                .doc("field", "value");
            try {
                UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
                UpdateResponse updateResponse = client.update(
                    request, RequestOptions.DEFAULT);
            } catch (ElasticsearchException e) {
                if (e.status() == RestStatus.NOT_FOUND) {
                    // <1>
@ -422,7 +427,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
                .doc("field", "value")
                .version(1);
            try {
                UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
                UpdateResponse updateResponse = client.update(
                    request, RequestOptions.DEFAULT);
            } catch(ElasticsearchException e) {
                if (e.status() == RestStatus.CONFLICT) {
                    // <1>
@ -445,7 +451,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
            //tag::update-request-source-include
            String[] includes = new String[]{"updated", "r*"};
            String[] excludes = Strings.EMPTY_ARRAY;
            request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1>
            request.fetchSource(
                new FetchSourceContext(true, includes, excludes)); // <1>
            //end::update-request-source-include
            UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
            assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
@ -459,7 +466,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
            //tag::update-request-source-exclude
            String[] includes = Strings.EMPTY_ARRAY;
            String[] excludes = new String[]{"updated"};
            request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1>
            request.fetchSource(
                new FetchSourceContext(true, includes, excludes)); // <1>
            //end::update-request-source-exclude
            UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
            assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
@ -508,8 +516,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
        {
            UpdateRequest request = new UpdateRequest("posts", "doc", "async").doc("reason", "async update").docAsUpsert(true);

            ActionListener<UpdateResponse> listener;
            // tag::update-execute-listener
            ActionListener<UpdateResponse> listener = new ActionListener<UpdateResponse>() {
            listener = new ActionListener<UpdateResponse>() {
                @Override
                public void onResponse(UpdateResponse updateResponse) {
                    // <1>
@ -548,12 +557,13 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
        // tag::delete-request
        DeleteRequest request = new DeleteRequest(
                "posts",    // <1>
                "doc",      // <2>
                "1");       // <3>
                "doc",    // <2>
                "1");     // <3>
        // end::delete-request

        // tag::delete-execute
        DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
        DeleteResponse deleteResponse = client.delete(
            request, RequestOptions.DEFAULT);
        // end::delete-execute
        assertSame(DocWriteResponse.Result.DELETED, deleteResponse.getResult());

@ -567,7 +577,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
            // <1>
        }
        if (shardInfo.getFailed() > 0) {
            for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
            for (ReplicationResponse.ShardInfo.Failure failure :
                    shardInfo.getFailures()) {
                String reason = failure.reason(); // <2>
            }
        }
@ -598,7 +609,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
        {
            // tag::delete-notfound
            DeleteRequest request = new DeleteRequest("posts", "doc", "does_not_exist");
            DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
            DeleteResponse deleteResponse = client.delete(
                request, RequestOptions.DEFAULT);
            if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) {
                // <1>
}
|
||||
|
@ -612,8 +624,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
// tag::delete-conflict
|
||||
try {
|
||||
DeleteRequest request = new DeleteRequest("posts", "doc", "1").version(2);
|
||||
DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
|
||||
DeleteResponse deleteResponse = client.delete(
|
||||
new DeleteRequest("posts", "doc", "1").version(2),
|
||||
RequestOptions.DEFAULT);
|
||||
} catch (ElasticsearchException exception) {
|
||||
if (exception.status() == RestStatus.CONFLICT) {
|
||||
// <1>
|
||||
|
@ -628,8 +641,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
DeleteRequest request = new DeleteRequest("posts", "doc", "async");
|
||||
|
||||
ActionListener<DeleteResponse> listener;
|
||||
// tag::delete-execute-listener
|
||||
ActionListener<DeleteResponse> listener = new ActionListener<DeleteResponse>() {
|
||||
listener = new ActionListener<DeleteResponse>() {
|
||||
@Override
|
||||
public void onResponse(DeleteResponse deleteResponse) {
|
||||
// <1>
|
||||
|
|
(File diff suppressed because it is too large.)
|
@ -25,6 +25,8 @@ import org.elasticsearch.client.Request;
|
|||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.client.watcher.ActivateWatchRequest;
|
||||
import org.elasticsearch.client.watcher.ActivateWatchResponse;
|
||||
import org.elasticsearch.client.watcher.AckWatchRequest;
|
||||
import org.elasticsearch.client.watcher.AckWatchResponse;
|
||||
import org.elasticsearch.client.watcher.ActionStatus;
|
||||
|
@ -160,9 +162,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
|
||||
{
|
||||
//tag::ack-watch-execute
|
||||
//tag::ack-watch-request
|
||||
AckWatchRequest request = new AckWatchRequest("my_watch_id", // <1>
|
||||
"logme", "emailme"); // <2>
|
||||
//end::ack-watch-request
|
||||
|
||||
//tag::ack-watch-execute
|
||||
AckWatchResponse response = client.watcher().ackWatch(request, RequestOptions.DEFAULT);
|
||||
//end::ack-watch-execute
|
||||
|
||||
|
@ -203,4 +208,60 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testActivateWatch() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
{
|
||||
BytesReference watch = new BytesArray("{ \n" +
|
||||
" \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
|
||||
" \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" +
|
||||
" \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
|
||||
"}");
|
||||
PutWatchRequest request = new PutWatchRequest("my_watch_id", watch, XContentType.JSON);
|
||||
request.setActive(false); // <1>
|
||||
PutWatchResponse response = client.watcher().putWatch(request, RequestOptions.DEFAULT);
|
||||
}
|
||||
|
||||
{
|
||||
//tag::activate-watch-request
|
||||
ActivateWatchRequest request = new ActivateWatchRequest("my_watch_id");
|
||||
ActivateWatchResponse response = client.watcher().activateWatch(request, RequestOptions.DEFAULT);
|
||||
//end::activate-watch-request
|
||||
|
||||
//tag::activate-watch-response
|
||||
WatchStatus watchStatus = response.getStatus(); // <1>
|
||||
//end::activate-watch-response
|
||||
|
||||
assertTrue(watchStatus.state().isActive());
|
||||
}
|
||||
|
||||
{
|
||||
ActivateWatchRequest request = new ActivateWatchRequest("my_watch_id");
|
||||
//tag::activate-watch-request-listener
|
||||
ActionListener<ActivateWatchResponse> listener = new ActionListener<ActivateWatchResponse>() {
|
||||
@Override
|
||||
public void onResponse(ActivateWatchResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
//end::activate-watch-request-listener
|
||||
|
||||
//Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
//tag::activate-watch-request-async
|
||||
client.watcher().activateWatchAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
//end::activate-watch-request-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -34,12 +34,4 @@ public class DeleteJobRequestTests extends ESTestCase {
        ex = expectThrows(NullPointerException.class, () -> createTestInstance().setJobId(null));
        assertEquals("[job_id] must not be null", ex.getMessage());
    }

    public void test_WithForce() {
        DeleteJobRequest deleteJobRequest = createTestInstance();
        assertFalse(deleteJobRequest.isForce());

        deleteJobRequest.setForce(true);
        assertTrue(deleteJobRequest.isForce());
    }
}

@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.tasks.TaskId;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class DeleteJobResponseTests extends AbstractXContentTestCase<DeleteJobResponse> {
|
||||
|
||||
@Override
|
||||
protected DeleteJobResponse createTestInstance() {
|
||||
if (randomBoolean()) {
|
||||
return new DeleteJobResponse(randomBoolean(), null);
|
||||
}
|
||||
return new DeleteJobResponse(null, new TaskId(randomAlphaOfLength(20) + ":" + randomIntBetween(1, 100)));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DeleteJobResponse doParseInstance(XContentParser parser) throws IOException {
|
||||
return DeleteJobResponse.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,69 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class GetDatafeedStatsRequestTests extends AbstractXContentTestCase<GetDatafeedStatsRequest> {
|
||||
|
||||
public void testAllDatafeedsRequest() {
|
||||
GetDatafeedStatsRequest request = GetDatafeedStatsRequest.getAllDatafeedStatsRequest();
|
||||
|
||||
assertEquals(request.getDatafeedIds().size(), 1);
|
||||
assertEquals(request.getDatafeedIds().get(0), "_all");
|
||||
}
|
||||
|
||||
public void testNewWithDatafeedId() {
|
||||
Exception exception = expectThrows(NullPointerException.class, () -> new GetDatafeedStatsRequest("datafeed", null));
|
||||
assertEquals(exception.getMessage(), "datafeedIds must not contain null values");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GetDatafeedStatsRequest createTestInstance() {
|
||||
int datafeedCount = randomIntBetween(0, 10);
|
||||
List<String> datafeedIds = new ArrayList<>(datafeedCount);
|
||||
|
||||
for (int i = 0; i < datafeedCount; i++) {
|
||||
datafeedIds.add(randomAlphaOfLength(10));
|
||||
}
|
||||
|
||||
GetDatafeedStatsRequest request = new GetDatafeedStatsRequest(datafeedIds);
|
||||
|
||||
if (randomBoolean()) {
|
||||
request.setAllowNoDatafeeds(randomBoolean());
|
||||
}
|
||||
|
||||
return request;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GetDatafeedStatsRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return GetDatafeedStatsRequest.PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return false;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,59 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedStatsTests;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class GetDatafeedStatsResponseTests extends AbstractXContentTestCase<GetDatafeedStatsResponse> {
|
||||
|
||||
@Override
|
||||
protected GetDatafeedStatsResponse createTestInstance() {
|
||||
|
||||
int count = randomIntBetween(1, 5);
|
||||
List<DatafeedStats> results = new ArrayList<>(count);
|
||||
for(int i = 0; i < count; i++) {
|
||||
results.add(DatafeedStatsTests.createRandomInstance());
|
||||
}
|
||||
|
||||
return new GetDatafeedStatsResponse(results, count);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GetDatafeedStatsResponse doParseInstance(XContentParser parser) throws IOException {
|
||||
return GetDatafeedStatsResponse.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Predicate<String> getRandomFieldsExcludeFilter() {
|
||||
return field -> !field.isEmpty();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class PreviewDatafeedRequestTests extends AbstractXContentTestCase<PreviewDatafeedRequest> {
|
||||
|
||||
@Override
|
||||
protected PreviewDatafeedRequest createTestInstance() {
|
||||
return new PreviewDatafeedRequest(DatafeedConfigTests.randomValidDatafeedId());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PreviewDatafeedRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return PreviewDatafeedRequest.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,99 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
|
||||
public class PreviewDatafeedResponseTests extends ESTestCase {
|
||||
|
||||
protected PreviewDatafeedResponse createTestInstance() throws IOException {
|
||||
//This is just to create a random object to stand in the place of random data
|
||||
DatafeedConfig datafeedConfig = DatafeedConfigTests.createRandom();
|
||||
BytesReference bytes = XContentHelper.toXContent(datafeedConfig, XContentType.JSON, false);
|
||||
return new PreviewDatafeedResponse(bytes);
|
||||
}
|
||||
|
||||
public void testGetDataList() throws IOException {
|
||||
String rawData = "[\n" +
|
||||
" {\n" +
|
||||
" \"time\": 1454803200000,\n" +
|
||||
" \"airline\": \"JZA\",\n" +
|
||||
" \"doc_count\": 5,\n" +
|
||||
" \"responsetime\": 990.4628295898438\n" +
|
||||
" },\n" +
|
||||
" {\n" +
|
||||
" \"time\": 1454803200000,\n" +
|
||||
" \"airline\": \"JBU\",\n" +
|
||||
" \"doc_count\": 23,\n" +
|
||||
" \"responsetime\": 877.5927124023438\n" +
|
||||
" },\n" +
|
||||
" {\n" +
|
||||
" \"time\": 1454803200000,\n" +
|
||||
" \"airline\": \"KLM\",\n" +
|
||||
" \"doc_count\": 42,\n" +
|
||||
" \"responsetime\": 1355.481201171875\n" +
|
||||
" }\n" +
|
||||
"]";
|
||||
BytesReference bytes = new BytesArray(rawData);
|
||||
PreviewDatafeedResponse response = new PreviewDatafeedResponse(bytes);
|
||||
assertThat(response.getDataList()
|
||||
.stream()
|
||||
.map(map -> (String)map.get("airline"))
|
||||
.collect(Collectors.toList()), containsInAnyOrder("JZA", "JBU", "KLM"));
|
||||
|
||||
rawData = "{\"key\":\"my_value\"}";
|
||||
bytes = new BytesArray(rawData);
|
||||
response = new PreviewDatafeedResponse(bytes);
|
||||
assertThat(response.getDataList()
|
||||
.stream()
|
||||
.map(map -> (String)map.get("key"))
|
||||
.collect(Collectors.toList()), containsInAnyOrder("my_value"));
|
||||
|
||||
}
|
||||
|
||||
//Because this is a raw BytesReference, the shuffling done via `AbstractXContentTestCase` is unacceptable and causes equality failures
|
||||
public void testSerializationDeserialization() throws IOException {
|
||||
for (int runs = 0; runs < 20; runs++) {
|
||||
XContentType xContentType = XContentType.JSON;
|
||||
PreviewDatafeedResponse testInstance = createTestInstance();
|
||||
BytesReference originalXContent = XContentHelper.toXContent(testInstance, xContentType, false);
|
||||
XContentParser parser = this.createParser(xContentType.xContent(), originalXContent);
|
||||
PreviewDatafeedResponse parsed = PreviewDatafeedResponse.fromXContent(parser);
|
||||
assertEquals(testInstance, parsed);
|
||||
assertToXContentEquivalent(
|
||||
XContentHelper.toXContent(testInstance, xContentType, false),
|
||||
XContentHelper.toXContent(parsed, xContentType, false),
|
||||
xContentType);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,75 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.datafeed;
|
||||
|
||||
import org.elasticsearch.client.ml.NodeAttributes;
|
||||
import org.elasticsearch.client.ml.NodeAttributesTests;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class DatafeedStatsTests extends AbstractXContentTestCase<DatafeedStats> {
|
||||
|
||||
public static DatafeedStats createRandomInstance() {
|
||||
String datafeedId = DatafeedConfigTests.randomValidDatafeedId();
|
||||
DatafeedState datafeedState =
|
||||
randomFrom(DatafeedState.STARTED, DatafeedState.STARTING, DatafeedState.STOPPED, DatafeedState.STOPPING);
|
||||
NodeAttributes nodeAttributes = null;
|
||||
if (randomBoolean()) {
|
||||
NodeAttributes randomAttributes = NodeAttributesTests.createRandom();
|
||||
int numberOfAttributes = randomIntBetween(1, 10);
|
||||
Map<String, String> attributes = new HashMap<>(numberOfAttributes);
|
||||
for(int i = 0; i < numberOfAttributes; i++) {
|
||||
String val = randomAlphaOfLength(10);
|
||||
attributes.put("ml.key-"+i, val);
|
||||
}
|
||||
nodeAttributes = new NodeAttributes(randomAttributes.getId(),
|
||||
randomAttributes.getName(),
|
||||
randomAttributes.getEphemeralId(),
|
||||
randomAttributes.getTransportAddress(),
|
||||
attributes);
|
||||
}
|
||||
String assignmentReason = randomBoolean() ? randomAlphaOfLength(10) : null;
|
||||
return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentReason);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DatafeedStats createTestInstance() {
|
||||
return createRandomInstance();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DatafeedStats doParseInstance(XContentParser parser) throws IOException {
|
||||
return DatafeedStats.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Predicate<String> getRandomFieldsExcludeFilter() {
|
||||
return field -> field.equals("node.attributes");
|
||||
}
|
||||
}
|
|
@ -34,9 +34,7 @@ import java.util.ArrayList;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class JobTests extends AbstractXContentTestCase<Job> {
|
||||
|
||||
|
@ -77,93 +75,6 @@ public class JobTests extends AbstractXContentTestCase<Job> {
|
|||
assertNotNull(Job.PARSER.apply(parser, null).build());
|
||||
}
|
||||
|
||||
public void testEquals_GivenDifferentClass() {
|
||||
Job job = buildJobBuilder("foo").build();
|
||||
assertFalse(job.equals("a string"));
|
||||
}
|
||||
|
||||
public void testEquals_GivenDifferentIds() {
|
||||
Date createTime = new Date();
|
||||
Job.Builder builder = buildJobBuilder("foo");
|
||||
builder.setCreateTime(createTime);
|
||||
Job job1 = builder.build();
|
||||
builder.setId("bar");
|
||||
Job job2 = builder.build();
|
||||
assertFalse(job1.equals(job2));
|
||||
}
|
||||
|
||||
public void testEquals_GivenDifferentRenormalizationWindowDays() {
|
||||
Date date = new Date();
|
||||
Job.Builder jobDetails1 = new Job.Builder("foo");
|
||||
jobDetails1.setDataDescription(new DataDescription.Builder());
|
||||
jobDetails1.setAnalysisConfig(createAnalysisConfig());
|
||||
jobDetails1.setRenormalizationWindowDays(3L);
|
||||
jobDetails1.setCreateTime(date);
|
||||
Job.Builder jobDetails2 = new Job.Builder("foo");
|
||||
jobDetails2.setDataDescription(new DataDescription.Builder());
|
||||
jobDetails2.setRenormalizationWindowDays(4L);
|
||||
jobDetails2.setAnalysisConfig(createAnalysisConfig());
|
||||
jobDetails2.setCreateTime(date);
|
||||
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
|
||||
}
|
||||
|
||||
public void testEquals_GivenDifferentBackgroundPersistInterval() {
|
||||
Date date = new Date();
|
||||
Job.Builder jobDetails1 = new Job.Builder("foo");
|
||||
jobDetails1.setDataDescription(new DataDescription.Builder());
|
||||
jobDetails1.setAnalysisConfig(createAnalysisConfig());
|
||||
jobDetails1.setBackgroundPersistInterval(TimeValue.timeValueSeconds(10000L));
|
||||
jobDetails1.setCreateTime(date);
|
||||
Job.Builder jobDetails2 = new Job.Builder("foo");
|
||||
jobDetails2.setDataDescription(new DataDescription.Builder());
|
||||
jobDetails2.setBackgroundPersistInterval(TimeValue.timeValueSeconds(8000L));
|
||||
jobDetails2.setAnalysisConfig(createAnalysisConfig());
|
||||
jobDetails2.setCreateTime(date);
|
||||
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
|
||||
}
|
||||
|
||||
public void testEquals_GivenDifferentModelSnapshotRetentionDays() {
|
||||
Date date = new Date();
|
||||
Job.Builder jobDetails1 = new Job.Builder("foo");
|
||||
jobDetails1.setDataDescription(new DataDescription.Builder());
|
||||
jobDetails1.setAnalysisConfig(createAnalysisConfig());
|
||||
jobDetails1.setModelSnapshotRetentionDays(10L);
|
||||
jobDetails1.setCreateTime(date);
|
||||
Job.Builder jobDetails2 = new Job.Builder("foo");
|
||||
jobDetails2.setDataDescription(new DataDescription.Builder());
|
||||
jobDetails2.setModelSnapshotRetentionDays(8L);
|
||||
jobDetails2.setAnalysisConfig(createAnalysisConfig());
|
||||
jobDetails2.setCreateTime(date);
|
||||
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
|
||||
}
|
||||
|
||||
public void testEquals_GivenDifferentResultsRetentionDays() {
|
||||
Date date = new Date();
|
||||
Job.Builder jobDetails1 = new Job.Builder("foo");
|
||||
jobDetails1.setDataDescription(new DataDescription.Builder());
|
||||
jobDetails1.setAnalysisConfig(createAnalysisConfig());
|
||||
jobDetails1.setCreateTime(date);
|
||||
jobDetails1.setResultsRetentionDays(30L);
|
||||
Job.Builder jobDetails2 = new Job.Builder("foo");
|
||||
jobDetails2.setDataDescription(new DataDescription.Builder());
|
||||
jobDetails2.setResultsRetentionDays(4L);
|
||||
jobDetails2.setAnalysisConfig(createAnalysisConfig());
|
||||
jobDetails2.setCreateTime(date);
|
||||
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
|
||||
}
|
||||
|
||||
public void testEquals_GivenDifferentCustomSettings() {
|
||||
Job.Builder jobDetails1 = buildJobBuilder("foo");
|
||||
Map<String, Object> customSettings1 = new HashMap<>();
|
||||
customSettings1.put("key1", "value1");
|
||||
jobDetails1.setCustomSettings(customSettings1);
|
||||
Job.Builder jobDetails2 = buildJobBuilder("foo");
|
||||
Map<String, Object> customSettings2 = new HashMap<>();
|
||||
customSettings2.put("key2", "value2");
|
||||
jobDetails2.setCustomSettings(customSettings2);
|
||||
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
|
||||
}
|
||||
|
||||
public void testCopyConstructor() {
|
||||
for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) {
|
||||
Job job = createTestInstance();
|
||||
|
@ -184,20 +95,6 @@ public class JobTests extends AbstractXContentTestCase<Job> {
|
|||
assertEquals("[job_type] must not be null", ex.getMessage());
|
||||
}
|
||||
|
||||
public static Job.Builder buildJobBuilder(String id, Date date) {
|
||||
Job.Builder builder = new Job.Builder(id);
|
||||
builder.setCreateTime(date);
|
||||
AnalysisConfig.Builder ac = createAnalysisConfig();
|
||||
DataDescription.Builder dc = new DataDescription.Builder();
|
||||
builder.setAnalysisConfig(ac);
|
||||
builder.setDataDescription(dc);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public static Job.Builder buildJobBuilder(String id) {
|
||||
return buildJobBuilder(id, new Date());
|
||||
}
|
||||
|
||||
public static String randomValidJobId() {
|
||||
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
|
||||
return generator.ofCodePointsLength(random(), 10, 10);
|
||||
|
@ -228,9 +125,6 @@ public class JobTests extends AbstractXContentTestCase<Job> {
|
|||
if (randomBoolean()) {
|
||||
builder.setFinishedTime(new Date(randomNonNegativeLong()));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
builder.setLastDataTime(new Date(randomNonNegativeLong()));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
builder.setEstablishedModelMemory(randomNonNegativeLong());
|
||||
}
|
||||
|
@ -265,6 +159,9 @@ public class JobTests extends AbstractXContentTestCase<Job> {
|
|||
if (randomBoolean()) {
|
||||
builder.setResultsIndexName(randomValidJobId());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
builder.setDeleting(randomBoolean());
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
|
|
@@ -38,6 +38,9 @@ public class AnomalyRecordTests extends AbstractXContentTestCase<AnomalyRecord>
        anomalyRecord.setActual(Collections.singletonList(randomDouble()));
        anomalyRecord.setTypical(Collections.singletonList(randomDouble()));
        anomalyRecord.setProbability(randomDouble());
        if (randomBoolean()) {
            anomalyRecord.setMultiBucketImpact(randomDouble());
        }
        anomalyRecord.setRecordScore(randomDouble());
        anomalyRecord.setInitialRecordScore(randomDouble());
        anomalyRecord.setInterim(randomBoolean());

@ -0,0 +1,113 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParseException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.XContentTestUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
/**
|
||||
* Basic unit tests for {@link ActivateWatchResponse}.
|
||||
*
|
||||
* Note that we only sanity check watch status parsing here, as there
|
||||
* are dedicated tests for it in {@link WatchStatusTests}.
|
||||
*/
|
||||
public class ActivateWatchResponseTests extends ESTestCase {
|
||||
|
||||
public void testBasicParsing() throws IOException {
|
||||
XContentType contentType = randomFrom(XContentType.values());
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
|
||||
.startObject("status")
|
||||
.field("version", 42)
|
||||
.field("execution_state", ExecutionState.ACKNOWLEDGED)
|
||||
.startObject("state")
|
||||
.field("active", false)
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
BytesReference bytes = BytesReference.bytes(builder);
|
||||
|
||||
ActivateWatchResponse response = parse(builder.contentType(), bytes);
|
||||
WatchStatus status = response.getStatus();
|
||||
assertNotNull(status);
|
||||
assertEquals(42, status.version());
|
||||
assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState());
|
||||
assertFalse(status.state().isActive());
|
||||
}
|
||||
|
||||
public void testParsingWithMissingStatus() throws IOException {
|
||||
XContentType contentType = randomFrom(XContentType.values());
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject().endObject();
|
||||
BytesReference bytes = BytesReference.bytes(builder);
|
||||
|
||||
expectThrows(IllegalArgumentException.class, () -> parse(builder.contentType(), bytes));
|
||||
}
|
||||
|
||||
public void testParsingWithNullStatus() throws IOException {
|
||||
XContentType contentType = randomFrom(XContentType.values());
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
|
||||
.nullField("status")
|
||||
.endObject();
|
||||
BytesReference bytes = BytesReference.bytes(builder);
|
||||
|
||||
expectThrows(XContentParseException.class, () -> parse(builder.contentType(), bytes));
|
||||
}
|
||||
|
||||
public void testParsingWithUnknownKeys() throws IOException {
|
||||
XContentType contentType = randomFrom(XContentType.values());
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
|
||||
.startObject("status")
|
||||
.field("version", 42)
|
||||
.field("execution_state", ExecutionState.ACKNOWLEDGED)
|
||||
.startObject("state")
|
||||
.field("active", true)
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
BytesReference bytes = BytesReference.bytes(builder);
|
||||
|
||||
Predicate<String> excludeFilter = field -> field.equals("status.actions");
|
||||
BytesReference bytesWithRandomFields = XContentTestUtils.insertRandomFields(
|
||||
builder.contentType(), bytes, excludeFilter, random());
|
||||
|
||||
ActivateWatchResponse response = parse(builder.contentType(), bytesWithRandomFields);
|
||||
WatchStatus status = response.getStatus();
|
||||
assertNotNull(status);
|
||||
assertEquals(42, status.version());
|
||||
assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState());
|
||||
assertTrue(status.state().isActive());
|
||||
}
|
||||
|
||||
private ActivateWatchResponse parse(XContentType contentType, BytesReference bytes) throws IOException {
|
||||
XContentParser parser = XContentFactory.xContent(contentType)
|
||||
.createParser(NamedXContentRegistry.EMPTY, null, bytes.streamInput());
|
||||
parser.nextToken();
|
||||
return ActivateWatchResponse.fromXContent(parser);
|
||||
}
|
||||
}
|
|
@@ -124,9 +124,9 @@ public class Response {
            final Matcher matcher = WARNING_HEADER_PATTERN.matcher(warning);
            if (matcher.matches()) {
                warnings.add(matcher.group(1));
                continue;
            } else {
                warnings.add(warning);
            }
            warnings.add(warning);
        }
        return warnings;
    }

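The hunk above reworks how `Response#getWarnings()` handles `Warning` response headers: a header that matches `WARNING_HEADER_PATTERN` contributes only its quoted warn-text, anything else is kept verbatim; the change swaps the `continue` for an explicit `else` branch without altering that behaviour. A self-contained sketch of the extraction, using a deliberately simplified pattern (an assumption; the real `WARNING_HEADER_PATTERN` validates the warn-code, agent and date fields much more strictly):

["source","java"]
--------------------------------------------------
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class WarningHeaderSketch {
    // Simplified stand-in for Response.WARNING_HEADER_PATTERN, not the real thing:
    // warn-code, warn-agent, quoted warn-text, quoted warn-date.
    private static final Pattern WARNING_HEADER_PATTERN =
        Pattern.compile("299 Elasticsearch-\\S+ \"([^\"]*)\" \"[^\"]*\"");

    static List<String> extractWarnings(List<String> warningHeaders) {
        List<String> warnings = new ArrayList<>();
        for (String warning : warningHeaders) {
            Matcher matcher = WARNING_HEADER_PATTERN.matcher(warning);
            // keep only the warn-text when the header is well formed, otherwise keep the raw header
            warnings.add(matcher.matches() ? matcher.group(1) : warning);
        }
        return warnings;
    }

    public static void main(String[] args) {
        System.out.println(extractWarnings(Arrays.asList(
            "299 Elasticsearch-1.2.2-SNAPSHOT-eeeeeee \"[type] is deprecated\" \"Mon, 01 Jan 2001 00:00:00 GMT\"",
            "poorly formatted warning")));
        // prints: [[type] is deprecated, poorly formatted warning]
    }
}
--------------------------------------------------
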
@ -59,6 +59,7 @@ import java.net.URI;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
|
@ -70,10 +71,12 @@ import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
|
|||
import static org.elasticsearch.client.RestClientTestUtil.getOkStatusCodes;
|
||||
import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode;
|
||||
import static org.elasticsearch.client.SyncResponseListenerTests.assertExceptionStackContainsCallingMethod;
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||
import static org.junit.Assert.assertArrayEquals;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
@ -96,6 +99,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
|
|||
private Node node;
|
||||
private CloseableHttpAsyncClient httpClient;
|
||||
private HostsTrackingFailureListener failureListener;
|
||||
private boolean strictDeprecationMode;
|
||||
|
||||
@Before
|
||||
@SuppressWarnings("unchecked")
|
||||
|
@ -147,8 +151,9 @@ public class RestClientSingleHostTests extends RestClientTestCase {
|
|||
defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default");
|
||||
node = new Node(new HttpHost("localhost", 9200));
|
||||
failureListener = new HostsTrackingFailureListener();
|
||||
strictDeprecationMode = randomBoolean();
|
||||
restClient = new RestClient(httpClient, 10000, defaultHeaders,
|
||||
singletonList(node), null, failureListener, NodeSelector.ANY, false);
|
||||
singletonList(node), null, failureListener, NodeSelector.ANY, strictDeprecationMode);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -331,9 +336,54 @@ public class RestClientSingleHostTests extends RestClientTestCase {
|
|||
}
|
||||
assertThat(esResponse.getStatusLine().getStatusCode(), equalTo(statusCode));
|
||||
assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), Collections.<String>emptySet());
|
||||
assertFalse(esResponse.hasWarnings());
|
||||
}
|
||||
}
|
||||
|
||||
public void testDeprecationWarnings() throws IOException {
|
||||
String chars = randomAsciiAlphanumOfLength(5);
|
||||
assertDeprecationWarnings(singletonList("poorly formatted " + chars), singletonList("poorly formatted " + chars));
|
||||
assertDeprecationWarnings(singletonList(formatWarning(chars)), singletonList(chars));
|
||||
assertDeprecationWarnings(
|
||||
Arrays.asList(formatWarning(chars), "another one", "and another"),
|
||||
Arrays.asList(chars, "another one", "and another"));
|
||||
|
||||
}
|
||||
|
||||
private void assertDeprecationWarnings(List<String> warningHeaderTexts, List<String> warningBodyTexts) throws IOException {
|
||||
String method = randomFrom(getHttpMethods());
|
||||
Request request = new Request(method, "/200");
|
||||
RequestOptions.Builder options = request.getOptions().toBuilder();
|
||||
for (String warningHeaderText : warningHeaderTexts) {
|
||||
options.addHeader("Warning", warningHeaderText);
|
||||
}
|
||||
request.setOptions(options);
|
||||
|
||||
Response response;
|
||||
if (strictDeprecationMode) {
|
||||
try {
|
||||
restClient.performRequest(request);
|
||||
fail("expected ResponseException because strict deprecation mode is enabled");
|
||||
return;
|
||||
} catch (ResponseException e) {
|
||||
assertThat(e.getMessage(), containsString("\nWarnings: " + warningBodyTexts));
|
||||
response = e.getResponse();
|
||||
}
|
||||
} else {
|
||||
response = restClient.performRequest(request);
|
||||
}
|
||||
assertTrue(response.hasWarnings());
|
||||
assertEquals(warningBodyTexts, response.getWarnings());
|
||||
}
|
||||
|
||||
/**
|
||||
* Emulates Elasticsearch's DeprecationLogger.formatWarning in simple
|
||||
* cases. We don't have that available because we're testing against 1.7.
|
||||
*/
|
||||
private static String formatWarning(String warningBody) {
|
||||
return "299 Elasticsearch-1.2.2-SNAPSHOT-eeeeeee \"" + warningBody + "\" \"Mon, 01 Jan 2001 00:00:00 GMT\"";
|
||||
}
|
||||
|
||||
private HttpUriRequest performRandomRequest(String method) throws Exception {
|
||||
String uriAsString = "/" + randomStatusCode(getRandom());
|
||||
Request request = new Request(method, uriAsString);
|
||||
|
|
|
@ -149,27 +149,39 @@ subprojects {
|
|||
|
||||
task buildBwcVersion(type: Exec) {
|
||||
dependsOn checkoutBwcBranch, writeBuildMetadata
|
||||
// send RUNTIME_JAVA_HOME so the build doesn't fail on a newer version the branch doesn't know about
|
||||
environment('RUNTIME_JAVA_HOME', getJavaHome(it, rootProject.ext.minimumRuntimeVersion.getMajorVersion() as int))
|
||||
workingDir = checkoutDir
|
||||
// we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds
|
||||
if (["5.6", "6.0", "6.1"].contains(bwcBranch)) {
|
||||
environment('JAVA_HOME', getJavaHome(it, 8))
|
||||
} else if ("6.2".equals(bwcBranch)) {
|
||||
environment('JAVA_HOME', getJavaHome(it, 9))
|
||||
} else if (["6.3", "6.4"].contains(bwcBranch)) {
|
||||
environment('JAVA_HOME', getJavaHome(it, 10))
|
||||
} else if (["6.x"].contains(bwcBranch)) {
|
||||
environment('JAVA_HOME', getJavaHome(it, 11))
|
||||
} else {
|
||||
environment('JAVA_HOME', project.compilerJavaHome)
|
||||
doFirst {
|
||||
// Execution time so that the checkouts are available
|
||||
List<String> lines = file("$checkoutDir/.ci/java-versions.properties").readLines()
|
||||
environment(
|
||||
'JAVA_HOME',
|
||||
getJavaHome(it, Integer.parseInt(
|
||||
lines
|
||||
.findAll({ it.startsWith("ES_BUILD_JAVA=java") })
|
||||
.collect({ it.replace("ES_BUILD_JAVA=java", "").trim() })
|
||||
.join("!!")
|
||||
))
|
||||
)
|
||||
environment(
|
||||
'RUNTIME_JAVA_HOME',
|
||||
getJavaHome(it, Integer.parseInt(
|
||||
lines
|
||||
.findAll({ it.startsWith("ES_RUNTIME_JAVA=java") })
|
||||
.collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() })
|
||||
.join("!!")
|
||||
))
|
||||
)
|
||||
}
|
||||
|
||||
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
|
||||
executable 'cmd'
|
||||
args '/C', 'call', new File(checkoutDir, 'gradlew').toString()
|
||||
} else {
|
||||
executable new File(checkoutDir, 'gradlew').toString()
|
||||
}
|
||||
if (gradle.startParameter.isOffline()) {
|
||||
args "--offline"
|
||||
}
|
||||
for (String dir : projectDirs) {
|
||||
args ":${dir.replace('/', ':')}:assemble"
|
||||
}
|
||||
|
@ -237,4 +249,4 @@ class IndentingOutputStream extends OutputStream {
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -228,7 +228,7 @@ final class JvmOptionsParser {
                // no range is present, apply the JVM option to the specified major version only
                upper = lower;
            } else if (end == null) {
                // a range of the form \\d+- is present, apply the JVM option to all major versions larger than the specifed one
                // a range of the form \\d+- is present, apply the JVM option to all major versions larger than the specified one
                upper = Integer.MAX_VALUE;
            } else {
                // a range of the form \\d+-\\d+ is present, apply the JVM option to the specified range of major versions

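The comments above cover the three version-prefix shapes the parser understands: a bare major version, an open-ended range (`\d+-`) and a bounded range (`\d+-\d+`). The sketch below reproduces only that lower/upper-bound logic for illustration; the `major[-[major]]:option` line shape and the regex are assumptions, not the parser's actual code:

["source","java"]
--------------------------------------------------
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VersionRangeSketch {
    // Assumed line format: "8:-opt", "9-:-opt", "9-10:-opt" (version prefix, then the JVM option).
    private static final Pattern RANGE_PREFIX = Pattern.compile("(\\d+)(-)?(\\d+)?:(.*)");

    static boolean applies(String line, int javaMajorVersion) {
        Matcher matcher = RANGE_PREFIX.matcher(line);
        if (matcher.matches() == false) {
            return true; // no version prefix, the option always applies
        }
        int lower = Integer.parseInt(matcher.group(1));
        final int upper;
        if (matcher.group(2) == null) {
            // no range is present, apply the JVM option to the specified major version only
            upper = lower;
        } else if (matcher.group(3) == null) {
            // an open-ended range: apply the JVM option to all larger major versions
            upper = Integer.MAX_VALUE;
        } else {
            // a bounded range: apply the JVM option to the specified range of major versions
            upper = Integer.parseInt(matcher.group(3));
        }
        return lower <= javaMajorVersion && javaMajorVersion <= upper;
    }

    public static void main(String[] args) {
        System.out.println(applies("8:-XX:+PrintGCDetails", 8));  // true
        System.out.println(applies("9-:-Xlog:gc", 11));           // true
        System.out.println(applies("8-9:-SomeOption", 11));       // false
    }
}
--------------------------------------------------
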
@@ -63,6 +63,8 @@ for its modifiers:
* `// TESTRESPONSE[_cat]`: Add substitutions for testing `_cat` responses. Use
  this after all other substitutions so it doesn't make other substitutions
  difficult.
* `// TESTRESPONSE[skip:reason]`: Skip the assertions specified by this
  response.
* `// TESTSETUP`: Marks this snippet as the "setup" for all other snippets in
  this file. This is a somewhat natural way of structuring documentation. You
  say "this is the data we use to explain this feature" then you add the

@@ -73,6 +75,10 @@ for its modifiers:
  right in the documentation file. In general, we should prefer `// TESTSETUP`
  over `// TEST[setup:name]` because it makes it more clear what steps have to
  be taken before the examples will work.
* `// NOTCONSOLE`: Marks this snippet as neither `// CONSOLE` nor
  `// TESTRESPONSE`, excluding it from the list of unconverted snippets. We
  should only use this for snippets that *are* JSON but are *not* responses or
  requests.

In addition to the standard CONSOLE syntax these snippets can contain blocks
of yaml surrounded by markers like this:

@ -1,14 +1,20 @@
|
|||
[[java-rest-high-document-delete]]
|
||||
--
|
||||
:api: delete
|
||||
:request: DeleteRequest
|
||||
:response: DeleteResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Delete API
|
||||
|
||||
[[java-rest-high-document-delete-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Delete Request
|
||||
|
||||
A `DeleteRequest` requires the following arguments:
|
||||
A +{request}+ requires the following arguments:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Index
|
||||
<2> Type
|
||||
|
@ -19,82 +25,47 @@ The following arguments can optionally be provided:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-routing]
|
||||
include-tagged::{doc-tests-file}[{api}-request-routing]
|
||||
--------------------------------------------------
|
||||
<1> Routing value
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-timeout]
|
||||
include-tagged::{doc-tests-file}[{api}-request-timeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to wait for primary shard to become available as a `TimeValue`
|
||||
<2> Timeout to wait for primary shard to become available as a `String`
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-refresh]
|
||||
include-tagged::{doc-tests-file}[{api}-request-refresh]
|
||||
--------------------------------------------------
|
||||
<1> Refresh policy as a `WriteRequest.RefreshPolicy` instance
|
||||
<2> Refresh policy as a `String`
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-version]
|
||||
include-tagged::{doc-tests-file}[{api}-request-version]
|
||||
--------------------------------------------------
|
||||
<1> Version
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-version-type]
|
||||
include-tagged::{doc-tests-file}[{api}-request-version-type]
|
||||
--------------------------------------------------
|
||||
<1> Version type
|
||||
|
||||
[[java-rest-high-document-delete-sync]]
|
||||
==== Synchronous Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-document-delete-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a delete request requires both the `DeleteRequest`
|
||||
instance and an `ActionListener` instance to be passed to the asynchronous
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `DeleteRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `DeleteResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of failure. The raised exception is provided as an argument
|
||||
|
||||
[[java-rest-high-document-delete-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Delete Response
|
||||
|
||||
The returned `DeleteResponse` allows to retrieve information about the executed
|
||||
The returned +{response}+ allows to retrieve information about the executed
|
||||
operation as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> Handle the situation where number of successful shards is less than
|
||||
total shards
|
||||
|
@@ -105,7 +76,7 @@ It is also possible to check whether the document was found or not:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-notfound]
include-tagged::{doc-tests-file}[{api}-notfound]
--------------------------------------------------
<1> Do something if the document to be deleted was not found

@@ -114,7 +85,7 @@ be thrown:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-conflict]
include-tagged::{doc-tests-file}[{api}-conflict]
--------------------------------------------------
<1> The raised exception indicates that a version conflict error was returned
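
Taken together, the `{api}-notfound` and `{api}-conflict` snippets referenced above amount to roughly the following flow (a reviewer's sketch built from the CRUDDocumentationIT code earlier in this diff, not an additional documented example):

["source","java"]
--------------------------------------------------
DeleteRequest request = new DeleteRequest("posts", "doc", "1");
try {
    DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
    if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) {
        // the document to be deleted was not found
    }
} catch (ElasticsearchException e) {
    if (e.status() == RestStatus.CONFLICT) {
        // a version conflict error was returned
    }
}
--------------------------------------------------
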
@@ -1,12 +1,18 @@
[[java-rest-high-document-exists]]
--
:api: exists
:request: GetRequest
:response: boolean
--

[id="{upid}-{api}"]
=== Exists API

The exists API returns `true` if a document exists, and `false` otherwise.

[[java-rest-high-document-exists-request]]
[id="{upid}-{api}-request"]
==== Exists Request

It uses `GetRequest` just like the <<java-rest-high-document-get>>.
It uses +{request}+ just like the <<java-rest-high-document-get>>.
All of its <<java-rest-high-document-get-request-optional-arguments, optional arguments>>
are supported. Since `exists()` only returns `true` or `false`, we recommend
turning off fetching `_source` and any stored fields so the request is

@@ -14,7 +20,7 @@ slightly lighter:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-request]
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Index
<2> Type

@@ -22,39 +28,4 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-request]
<4> Disable fetching `_source`.
<5> Disable fetching stored fields.
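
As a sketch, the lighter request described above could look like this, assuming the usual `GetRequest` setters shown elsewhere in these docs and placeholder index/type/id values:

["source","java"]
--------------------------------------------------
GetRequest getRequest = new GetRequest("posts", "doc", "1");
getRequest.fetchSourceContext(new FetchSourceContext(false)); // do not fetch _source
getRequest.storedFields("_none_");                            // do not fetch any stored fields
boolean exists = client.exists(getRequest, RequestOptions.DEFAULT);
--------------------------------------------------
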
|
||||
[[java-rest-high-document-exists-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-document-exists-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of exists request requires both the `GetRequest`
|
||||
instance and an `ActionListener` instance to be passed to the asynchronous
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes.
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `GetResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument.
|
||||
<2> Called in case of failure. The raised exception is provided as an argument.
|
||||
include::../execution.asciidoc[]
|
||||
|
|
|
@ -1,44 +1,50 @@
|
|||
[[java-rest-high-document-get]]
|
||||
--
|
||||
:api: get
|
||||
:request: GetRequest
|
||||
:response: GetResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Get API
|
||||
|
||||
[[java-rest-high-document-get-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Request
|
||||
|
||||
A `GetRequest` requires the following arguments:
|
||||
A +{request}+ requires the following arguments:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Index
|
||||
<2> Type
|
||||
<3> Document id

[[java-rest-high-document-get-request-optional-arguments]]
[id="{upid}-{api}-request-optional-arguments"]
==== Optional arguments
The following arguments can optionally be provided:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-no-source]
include-tagged::{doc-tests-file}[{api}-request-no-source]
--------------------------------------------------
<1> Disable source retrieval, enabled by default

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-source-include]
include-tagged::{doc-tests-file}[{api}-request-source-include]
--------------------------------------------------
<1> Configure source inclusion for specific fields

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-source-exclude]
include-tagged::{doc-tests-file}[{api}-request-source-exclude]
--------------------------------------------------
<1> Configure source exclusion for specific fields

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-stored]
include-tagged::{doc-tests-file}[{api}-request-stored]
--------------------------------------------------
<1> Configure retrieval for specific stored fields (requires fields to be
stored separately in the mappings)
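As a sketch only, source filtering and stored-field retrieval could be combined roughly as follows; the field names are hypothetical and `getRequest` is the request built earlier:

["source","java"]
--------------------------------------------------
// Field names are hypothetical; "getRequest" is the request built above.
String[] includes = new String[]{"message", "*_date"};
String[] excludes = Strings.EMPTY_ARRAY;
getRequest.fetchSourceContext(new FetchSourceContext(true, includes, excludes)); // include only these fields
getRequest.storedFields("message"); // "message" must be mapped as a stored field for this to return anything
--------------------------------------------------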

@@ -47,92 +53,57 @@ separately in the mappings)

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-routing]
include-tagged::{doc-tests-file}[{api}-request-routing]
--------------------------------------------------
<1> Routing value

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-preference]
include-tagged::{doc-tests-file}[{api}-request-preference]
--------------------------------------------------
<1> Preference value

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-realtime]
include-tagged::{doc-tests-file}[{api}-request-realtime]
--------------------------------------------------
<1> Set realtime flag to `false` (`true` by default)

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-refresh]
include-tagged::{doc-tests-file}[{api}-request-refresh]
--------------------------------------------------
<1> Perform a refresh before retrieving the document (`false` by default)

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-version]
include-tagged::{doc-tests-file}[{api}-request-version]
--------------------------------------------------
<1> Version

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-version-type]
include-tagged::{doc-tests-file}[{api}-request-version-type]
--------------------------------------------------
<1> Version type
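A condensed sketch of the optional arguments listed above, under the assumption that `getRequest` already exists; the concrete values are illustrative only:

["source","java"]
--------------------------------------------------
getRequest.routing("routing");                // route the request to a specific shard
getRequest.preference("_local");              // prefer a local shard copy
getRequest.realtime(false);                   // disable the (default) realtime get
getRequest.refresh(true);                     // refresh the shard before retrieving the document
getRequest.version(2);                        // fail if the stored version is not 2
getRequest.versionType(VersionType.EXTERNAL); // interpret the version as an external version
--------------------------------------------------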

[[java-rest-high-document-get-sync]]
==== Synchronous Execution
include::../execution.asciidoc[]

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-execute]
--------------------------------------------------

[[java-rest-high-document-get-async]]
==== Asynchronous Execution

The asynchronous execution of a get request requires both the `GetRequest`
instance and an `ActionListener` instance to be passed to the asynchronous
method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-execute-async]
--------------------------------------------------
<1> The `GetRequest` to execute and the `ActionListener` to use when
the execution completes

The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for `GetResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument.
<2> Called in case of failure. The raised exception is provided as an argument.

[[java-rest-high-document-get-response]]
[id="{upid}-{api}-response"]
==== Get Response

The returned `GetResponse` allows you to retrieve the requested document along with
The returned +{response}+ allows you to retrieve the requested document along with
its metadata and, possibly, stored fields.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-response]
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> Retrieve the document as a `String`
<2> Retrieve the document as a `Map<String, Object>`
<3> Retrieve the document as a `byte[]`
<4> Handle the scenario where the document was not found. Note that although
the returned response has a `404` status code, a valid `GetResponse` is
the returned response has a `404` status code, a valid +{response}+ is
returned rather than an exception thrown. Such a response does not hold any
source document and its `isExists` method returns `false`.
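A minimal sketch of inspecting the response along these lines, assuming `getResponse` was obtained as shown earlier:

["source","java"]
--------------------------------------------------
if (getResponse.isExists()) {
    long version = getResponse.getVersion();
    String sourceAsString = getResponse.getSourceAsString();        // document body as a String
    Map<String, Object> sourceAsMap = getResponse.getSourceAsMap(); // document body as a Map
    byte[] sourceAsBytes = getResponse.getSourceAsBytes();          // document body as a byte[]
} else {
    // document not found: the response carries a 404 but no exception is thrown
}
--------------------------------------------------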

@@ -142,7 +113,7 @@ which needs to be handled as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-indexnotfound]
include-tagged::{doc-tests-file}[{api}-indexnotfound]
--------------------------------------------------
<1> Handle the exception thrown because the index does not exist

@@ -151,6 +122,6 @@ document has a different version number, a version conflict is raised:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-conflict]
include-tagged::{doc-tests-file}[{api}-conflict]
--------------------------------------------------
<1> The raised exception indicates that a version conflict error was returned
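A sketch of how both failure cases could be handled in one place; the index name and version are hypothetical:

["source","java"]
--------------------------------------------------
GetRequest request = new GetRequest("does_not_exist", "doc", "1").version(2); // hypothetical values
try {
    GetResponse getResponse = client.get(request, RequestOptions.DEFAULT);
} catch (ElasticsearchException e) {
    if (e.status() == RestStatus.NOT_FOUND) {
        // the index does not exist
    }
    if (e.status() == RestStatus.CONFLICT) {
        // the requested version does not match the version of the stored document
    }
}
--------------------------------------------------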

@@ -1,14 +1,20 @@
[[java-rest-high-document-update]]
--
:api: update
:request: UpdateRequest
:response: UpdateResponse
--

[id="{upid}-{api}"]
=== Update API

[[java-rest-high-document-update-request]]
[id="{upid}-{api}-request"]
==== Update Request

An `UpdateRequest` requires the following arguments:
An +{request}+ requires the following arguments:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request]
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Index
<2> Type

@@ -22,7 +28,7 @@ The script can be provided as an inline script:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-inline-script]
include-tagged::{doc-tests-file}[{api}-request-with-inline-script]
--------------------------------------------------
<1> Script parameters provided as a `Map` of objects
<2> Create an inline script using the `painless` language and the previous parameters
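As a rough sketch, an inline-script update could be built like this; the index, type, id, field name and parameter are all placeholders:

["source","java"]
--------------------------------------------------
// "posts", "doc" and "1" are placeholder values.
UpdateRequest request = new UpdateRequest("posts", "doc", "1");
Map<String, Object> parameters = Collections.singletonMap("count", 4); // parameters exposed as params.count
Script inline = new Script(ScriptType.INLINE, "painless",
        "ctx._source.field += params.count", parameters);              // inline Painless script
request.script(inline);
--------------------------------------------------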

@@ -32,7 +38,7 @@ Or as a stored script:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-stored-script]
include-tagged::{doc-tests-file}[{api}-request-with-stored-script]
--------------------------------------------------
<1> Reference to a script stored under the name `increment-field` in the `painless` language
<2> Sets the script in the update request

@@ -45,27 +51,27 @@ The partial document can be provided in different ways:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-doc-as-string]
include-tagged::{doc-tests-file}[{api}-request-with-doc-as-string]
--------------------------------------------------
<1> Partial document source provided as a `String` in JSON format

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-doc-as-map]
include-tagged::{doc-tests-file}[{api}-request-with-doc-as-map]
--------------------------------------------------
<1> Partial document source provided as a `Map` which gets automatically converted
to JSON format

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-doc-as-xcontent]
include-tagged::{doc-tests-file}[{api}-request-with-doc-as-xcontent]
--------------------------------------------------
<1> Partial document source provided as an `XContentBuilder` object, using the Elasticsearch
built-in helpers to generate JSON content

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-shortcut]
include-tagged::{doc-tests-file}[{api}-request-shortcut]
--------------------------------------------------
<1> Partial document source provided as `Object` key-pairs, which get converted to
JSON format
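For illustration, three of these forms side by side, assuming `request` is an existing `UpdateRequest`; the field names and values are invented:

["source","java"]
--------------------------------------------------
// As a JSON String
request.doc("{\"updated\":\"2020-01-01\",\"reason\":\"daily update\"}", XContentType.JSON);

// As a Map, automatically converted to JSON
Map<String, Object> jsonMap = new HashMap<>();
jsonMap.put("updated", "2020-01-01");
jsonMap.put("reason", "daily update");
request.doc(jsonMap);

// As Object key-pairs
request.doc("updated", "2020-01-01", "reason", "daily update");
--------------------------------------------------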

@@ -76,7 +82,7 @@ will be inserted as a new document using the `upsert` method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-upsert]
include-tagged::{doc-tests-file}[{api}-request-upsert]
--------------------------------------------------
<1> Upsert document source provided as a `String`

@@ -89,27 +95,27 @@ The following arguments can optionally be provided:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-routing]
include-tagged::{doc-tests-file}[{api}-request-routing]
--------------------------------------------------
<1> Routing value

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-timeout]
include-tagged::{doc-tests-file}[{api}-request-timeout]
--------------------------------------------------
<1> Timeout to wait for the primary shard to become available as a `TimeValue`
<2> Timeout to wait for the primary shard to become available as a `String`

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-refresh]
include-tagged::{doc-tests-file}[{api}-request-refresh]
--------------------------------------------------
<1> Refresh policy as a `WriteRequest.RefreshPolicy` instance
<2> Refresh policy as a `String`

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-retry]
include-tagged::{doc-tests-file}[{api}-request-retry]
--------------------------------------------------
<1> How many times to retry the update operation if the document to update has
been changed by another operation between the get and indexing phases of the

@@ -117,103 +123,68 @@ update operation

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-no-source]
include-tagged::{doc-tests-file}[{api}-request-no-source]
--------------------------------------------------
<1> Enable source retrieval, disabled by default

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-source-include]
include-tagged::{doc-tests-file}[{api}-request-source-include]
--------------------------------------------------
<1> Configure source inclusion for specific fields

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-source-exclude]
include-tagged::{doc-tests-file}[{api}-request-source-exclude]
--------------------------------------------------
<1> Configure source exclusion for specific fields

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-version]
include-tagged::{doc-tests-file}[{api}-request-version]
--------------------------------------------------
<1> Version

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-detect-noop]
include-tagged::{doc-tests-file}[{api}-request-detect-noop]
--------------------------------------------------
<1> Disable the noop detection

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-scripted-upsert]
include-tagged::{doc-tests-file}[{api}-request-scripted-upsert]
--------------------------------------------------
<1> Indicate that the script must run regardless of whether the document exists or not,
i.e. the script takes care of creating the document if it does not already exist.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-doc-upsert]
include-tagged::{doc-tests-file}[{api}-request-doc-upsert]
--------------------------------------------------
<1> Indicate that the partial document must be used as the upsert document if it
does not exist yet.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-active-shards]
include-tagged::{doc-tests-file}[{api}-request-active-shards]
--------------------------------------------------
<1> Sets the number of shard copies that must be active before proceeding with
the update operation.
<2> Number of shard copies provided as an `ActiveShardCount`: can be `ActiveShardCount.ALL`,
`ActiveShardCount.ONE` or `ActiveShardCount.DEFAULT` (default)
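As a compact sketch, several of these optional settings applied to an existing `UpdateRequest` named `request`; the concrete values are illustrative only:

["source","java"]
--------------------------------------------------
request.routing("routing");                                      // routing value
request.timeout(TimeValue.timeValueSeconds(1));                  // wait at most 1s for the primary shard
request.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); // wait for a refresh before returning
request.retryOnConflict(3);                                      // retry up to 3 times on version conflicts
request.fetchSource(true);                                       // return the updated _source in the response
request.docAsUpsert(true);                                       // index the partial document if none exists yet
request.scriptedUpsert(true);                                    // run the script even if the document is missing
request.waitForActiveShards(ActiveShardCount.ALL);               // require all shard copies to be active
--------------------------------------------------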

[[java-rest-high-document-update-sync]]
==== Synchronous Execution
include::../execution.asciidoc[]

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-execute]
--------------------------------------------------

[[java-rest-high-document-update-async]]
==== Asynchronous Execution

The asynchronous execution of an update request requires both the `UpdateRequest`
instance and an `ActionListener` instance to be passed to the asynchronous
method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-execute-async]
--------------------------------------------------
<1> The `UpdateRequest` to execute and the `ActionListener` to use when
the execution completes

The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for `UpdateResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument.
<2> Called in case of failure. The raised exception is provided as an argument.

[[java-rest-high-document-update-response]]
[id="{upid}-{api}-response"]
==== Update Response

The returned `UpdateResponse` allows you to retrieve information about the executed
operation as follows:
The returned +{response}+ allows you to retrieve information about the executed
operation as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-response]
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> Handle the case where the document was created for the first time (upsert)
<2> Handle the case where the document was updated
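A minimal sketch of branching on the operation result, assuming `request` and `client` exist as above:

["source","java"]
--------------------------------------------------
UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
if (updateResponse.getResult() == DocWriteResponse.Result.CREATED) {
    // the document did not exist and was created through the upsert
} else if (updateResponse.getResult() == DocWriteResponse.Result.UPDATED) {
    // the existing document was updated
} else if (updateResponse.getResult() == DocWriteResponse.Result.NOOP) {
    // noop detection kicked in and the document was left untouched
} else if (updateResponse.getResult() == DocWriteResponse.Result.DELETED) {
    // the document was deleted
}
--------------------------------------------------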

@@ -227,7 +198,7 @@ source of the updated document:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-getresult]
include-tagged::{doc-tests-file}[{api}-getresult]
--------------------------------------------------
<1> Retrieve the updated document as a `GetResult`
<2> Retrieve the source of the updated document as a `String`

@@ -240,7 +211,7 @@ It is also possible to check for shard failures:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-failure]
include-tagged::{doc-tests-file}[{api}-failure]
--------------------------------------------------
<1> Handle the situation where the number of successful shards is less than
total shards

@@ -252,7 +223,7 @@ which needs to be handled as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-docnotfound]
include-tagged::{doc-tests-file}[{api}-docnotfound]
--------------------------------------------------
<1> Handle the exception thrown because the document does not exist

@@ -261,6 +232,6 @@ be thrown:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-conflict]
include-tagged::{doc-tests-file}[{api}-conflict]
--------------------------------------------------
<1> The raised exception indicates that a version conflict error was returned.

@@ -38,7 +38,7 @@ completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for +{response}+ looks like:
A typical listener for +{api}+ looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------

@@ -1,18 +1,23 @@
[[java-rest-high-x-pack-ml-close-job]]
--
:api: close-job
:request: CloseJobRequest
:response: CloseJobResponse
--
[id="{upid}-{api}"]
=== Close Job API

The Close Job API provides the ability to close {ml} jobs in the cluster.
It accepts a `CloseJobRequest` object and responds
with a `CloseJobResponse` object.
It accepts a +{request}+ object and responds
with a +{response}+ object.

[[java-rest-high-x-pack-ml-close-job-request]]
[id="{upid}-{api}-request"]
==== Close Job Request

A `CloseJobRequest` object gets created with an existing non-null `jobId`.
A +{request}+ object gets created with an existing non-null `jobId`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-close-job-request]
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new request referencing existing job IDs
<2> Optionally used to close a failed job, or to forcefully close a job

@@ -22,38 +27,14 @@ which has not responded to its initial close request.
<4> Optionally setting the `timeout` value for how long the
execution should wait for the job to be closed.
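As a rough, untested sketch of the request described above; the job id is hypothetical, and the package and method names follow the 6.x high-level client (ML APIs reached through `client.machineLearning()`) and may differ in other versions:

["source","java"]
--------------------------------------------------
// Package and method names follow the 6.x high-level client and may differ in other versions.
CloseJobRequest closeJobRequest = new CloseJobRequest("my-job-id"); // hypothetical job id
closeJobRequest.setForce(false);                                    // do not force-close the job
closeJobRequest.setTimeout(TimeValue.timeValueMinutes(10));         // wait up to 10 minutes

CloseJobResponse closeJobResponse =
        client.machineLearning().closeJob(closeJobRequest, RequestOptions.DEFAULT);
boolean closed = closeJobResponse.isClosed();                       // true if the job was closed
--------------------------------------------------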

[[java-rest-high-x-pack-ml-close-job-execution]]
==== Execution

The request can be executed through the `MachineLearningClient` contained
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
[id="{upid}-{api}-response"]
==== Close Job Response

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-close-job-execute]
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> `isClosed()` from the `CloseJobResponse` indicates if the job was successfully
<1> `isClosed()` from the +{response}+ indicates if the job was successfully
closed or not.

[[java-rest-high-x-pack-ml-close-job-execution-async]]
==== Asynchronous Execution

The request can also be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-close-job-execute-async]
--------------------------------------------------
<1> The `CloseJobRequest` to execute and the `ActionListener` to use when
the execution completes

The method does not block and returns immediately. The passed `ActionListener` is used
to notify the caller of completion. A typical `ActionListener` for `CloseJobResponse` may
look like

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-close-job-listener]
--------------------------------------------------
<1> `onResponse` is called back when the action is completed successfully
<2> `onFailure` is called back when some unexpected error occurs
include::../execution.asciidoc[]

@@ -1,59 +1,33 @@
[[java-rest-high-x-pack-ml-delete-calendar]]
--
:api: delete-calendar
:request: DeleteCalendarRequest
:response: AcknowledgedResponse
--
[id="{upid}-{api}"]
=== Delete Calendar API
Delete a {ml} calendar.
The API accepts a `DeleteCalendarRequest` and responds
with an `AcknowledgedResponse` object.
The API accepts a +{request}+ and responds
with a +{response}+ object.

[[java-rest-high-x-pack-ml-delete-calendar-request]]
[id="{upid}-{api}-request"]
==== Delete Calendar Request

A `DeleteCalendarRequest` object requires a non-null `calendarId`.

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-request]
include-tagged::{doc-tests-file}[{api}-request]
---------------------------------------------------
<1> Constructing a new request referencing an existing Calendar

[[java-rest-high-x-pack-ml-delete-calendar-response]]
[id="{upid}-{api}-response"]
==== Delete Calendar Response

The returned `AcknowledgedResponse` object indicates the acknowledgement of the request:
The returned +{response}+ object indicates the acknowledgement of the request:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-response]
include-tagged::{doc-tests-file}[{api}-response]
---------------------------------------------------
<1> `isAcknowledged` was the deletion request acknowledged or not

[[java-rest-high-x-pack-ml-delete-calendar-execution]]
==== Execution
The request can be executed through the `MachineLearningClient` contained
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-execute]
--------------------------------------------------

[[java-rest-high-x-pack-ml-delete-calendar-async]]
==== Delete Calendar Asynchronously

This request can also be made asynchronously.
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-execute-async]
---------------------------------------------------
<1> The `DeleteCalendarRequest` to execute and the `ActionListener` to alert on completion or error.

The deletion request returns immediately. Once the request is completed, the `ActionListener` is
called back using `onResponse` or `onFailure`. The latter indicates some failure occurred when
making the request.

A typical listener for a `DeleteCalendarRequest` could be defined as follows:

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-listener]
---------------------------------------------------
<1> The action to be taken when it is completed
<2> What to do when a failure occurs
include::../execution.asciidoc[]

@@ -1,49 +1,32 @@
[[java-rest-high-x-pack-ml-delete-datafeed]]
--
:api: delete-datafeed
:request: DeleteDatafeedRequest
:response: AcknowledgedResponse
--
[id="{upid}-delete-datafeed"]
=== Delete Datafeed API

[[java-rest-high-x-pack-machine-learning-delete-datafeed-request]]
[id="{upid}-{api}-request"]
==== Delete Datafeed Request

A `DeleteDatafeedRequest` object requires a non-null `datafeedId` and can optionally set `force`.
Can be executed as follows:
A +{request}+ object requires a non-null `datafeedId` and can optionally set `force`.

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request]
include-tagged::{doc-tests-file}[{api}-request]
---------------------------------------------------
<1> Use to forcefully delete a started datafeed;
this method is quicker than stopping and deleting the datafeed.
Defaults to `false`.

[[java-rest-high-x-pack-machine-learning-delete-datafeed-response]]
include::../execution.asciidoc[]

[id="{upid}-{api}-response"]
==== Delete Datafeed Response

The returned `AcknowledgedResponse` object indicates the acknowledgement of the request:
The returned +{response}+ object indicates the acknowledgement of the request:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-response]
include-tagged::{doc-tests-file}[{api}-response]
---------------------------------------------------
<1> `isAcknowledged` was the deletion request acknowledged or not

[[java-rest-high-x-pack-machine-learning-delete-datafeed-async]]
==== Delete Datafeed Asynchronously

This request can also be made asynchronously.
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request-async]
---------------------------------------------------
<1> The `DeleteDatafeedRequest` to execute and the `ActionListener` to alert on completion or error.

The deletion request returns immediately. Once the request is completed, the `ActionListener` is
called back using `onResponse` or `onFailure`. The latter indicates some failure occurred when
making the request.

A typical listener for a `DeleteDatafeedRequest` could be defined as follows:

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request-listener]
---------------------------------------------------
<1> The action to be taken when it is completed
<2> What to do when a failure occurs

@@ -1,20 +1,25 @@
[[java-rest-high-x-pack-ml-delete-forecast]]
--
:api: delete-forecast
:request: DeleteForecastRequest
:response: AcknowledgedResponse
--
[id="{upid}-{api}"]
=== Delete Forecast API

The Delete Forecast API provides the ability to delete a {ml} job's
forecast in the cluster.
It accepts a `DeleteForecastRequest` object and responds
with an `AcknowledgedResponse` object.
It accepts a +{request}+ object and responds
with an +{response}+ object.

[[java-rest-high-x-pack-ml-delete-forecast-request]]
[id="{upid}-{api}-request"]
==== Delete Forecast Request

A `DeleteForecastRequest` object gets created with an existing non-null `jobId`.
A +{request}+ object gets created with an existing non-null `jobId`.
All other fields are optional for the request.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-request]
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new request referencing an existing `jobId`

@@ -24,55 +29,23 @@ The following arguments are optional.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-request-options]
include-tagged::{doc-tests-file}[{api}-request-options]
--------------------------------------------------
<1> Sets the specific forecastIds to delete, can be set to `_all` to indicate ALL forecasts for the given
`jobId`
<2> Set the timeout for the request to respond, default is 30 seconds
<3> Set the `allow_no_forecasts` option. When `true` no error will be returned if an `_all`
request finds no forecasts. It defaults to `true`
request finds no forecasts. It defaults to `true`

[[java-rest-high-x-pack-ml-delete-forecast-execution]]
==== Execution

The request can be executed through the `MachineLearningClient` contained
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-execute]
--------------------------------------------------

[[java-rest-high-x-pack-ml-delete-forecast-execution-async]]
==== Asynchronous Execution

The request can also be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-execute-async]
--------------------------------------------------
<1> The `DeleteForecastRequest` to execute and the `ActionListener` to use when
the execution completes

The method does not block and returns immediately. The passed `ActionListener` is used
to notify the caller of completion. A typical `ActionListener` for `AcknowledgedResponse` may
look like

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-listener]
--------------------------------------------------
<1> `onResponse` is called back when the action is completed successfully
<2> `onFailure` is called back when some unexpected error occurs

[[java-rest-high-x-pack-ml-delete-forecast-response]]
[id="{upid}-{api}-response"]
==== Delete Forecast Response

An `AcknowledgedResponse` contains an acknowledgement of the forecast(s) deletion
An +{response}+ contains an acknowledgement of the forecast(s) deletion

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-response]
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> `isAcknowledged()` indicates if the forecast was successfully deleted or not.

include::../execution.asciidoc[]

@@ -1,49 +1,53 @@
[[java-rest-high-x-pack-ml-delete-job]]
--
:api: delete-job
:request: DeleteJobRequest
:response: AcknowledgedResponse
--
[id="{upid}-{api}"]
=== Delete Job API

[[java-rest-high-x-pack-machine-learning-delete-job-request]]
[id="{upid}-{api}-request"]
==== Delete Job Request

A `DeleteJobRequest` object requires a non-null `jobId` and can optionally set `force`.
Can be executed as follows:
A +{request}+ object requires a non-null `jobId` and can optionally set `force`.

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request]
include-tagged::{doc-tests-file}[{api}-request]
---------------------------------------------------
<1> Constructing a new request referencing an existing `jobId`

==== Optional Arguments

The following arguments are optional:

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-force]
---------------------------------------------------
<1> Use to forcefully delete an opened job;
this method is quicker than closing and deleting the job.
Defaults to `false`
Defaults to `false`.

[[java-rest-high-x-pack-machine-learning-delete-job-response]]
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-wait-for-completion]
---------------------------------------------------
<1> Use to set whether the request should wait until the operation has completed before returning.
Defaults to `true`.

[id="{upid}-{api}-response"]
==== Delete Job Response

The returned `AcknowledgedResponse` object indicates the acknowledgement of the request:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-response]
---------------------------------------------------
<1> `isAcknowledged` was the deletion request acknowledged or not

[[java-rest-high-x-pack-machine-learning-delete-job-async]]
==== Delete Job Asynchronously

This request can also be made asynchronously.
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-async]
---------------------------------------------------
<1> The `DeleteJobRequest` to execute and the `ActionListener` to alert on completion or error.

The deletion request returns immediately. Once the request is completed, the `ActionListener` is
called back using `onResponse` or `onFailure`. The latter indicates some failure occurred when
making the request.

A typical listener for a `DeleteJobRequest` could be defined as follows:
The returned +{response}+ object indicates the acknowledgement of the job deletion or
the deletion task depending on whether the request was set to wait for completion:

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-listener]
include-tagged::{doc-tests-file}[{api}-response]
---------------------------------------------------
<1> The action to be taken when it is completed
<2> What to do when a failure occurs
<1> Whether the job deletion was acknowledged or not; will be `null` when set not to wait for completion
<2> The id of the job deletion task; will be `null` when set to wait for completion

include::../execution.asciidoc[]

@@ -1,20 +1,25 @@
[[java-rest-high-x-pack-ml-flush-job]]
--
:api: flush-job
:request: FlushJobRequest
:response: FlushJobResponse
--
[id="{upid}-{api}"]
=== Flush Job API

The Flush Job API provides the ability to flush a {ml} job's
datafeed in the cluster.
It accepts a `FlushJobRequest` object and responds
with a `FlushJobResponse` object.
It accepts a +{request}+ object and responds
with a +{response}+ object.

[[java-rest-high-x-pack-ml-flush-job-request]]
[id="{upid}-{api}-request"]
==== Flush Job Request

A `FlushJobRequest` object gets created with an existing non-null `jobId`.
A +{request}+ object gets created with an existing non-null `jobId`.
All other fields are optional for the request.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request]
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new request referencing an existing `jobId`

@@ -24,7 +29,7 @@ The following arguments are optional.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request-options]
include-tagged::{doc-tests-file}[{api}-request-options]
--------------------------------------------------
<1> Set request to calculate the interim results
<2> Set the advanced time to flush to the particular time value

@@ -34,50 +39,18 @@ to calculate the interim results (requires `calc_interim` to be `true`)
to calculate interim results (requires `calc_interim` to be `true`)
<5> Set the skip time to skip a particular time value

[[java-rest-high-x-pack-ml-flush-job-execution]]
==== Execution

The request can be executed through the `MachineLearningClient` contained
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute]
--------------------------------------------------

[[java-rest-high-x-pack-ml-flush-job-execution-async]]
==== Asynchronous Execution

The request can also be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute-async]
--------------------------------------------------
<1> The `FlushJobRequest` to execute and the `ActionListener` to use when
the execution completes

The method does not block and returns immediately. The passed `ActionListener` is used
to notify the caller of completion. A typical `ActionListener` for `FlushJobResponse` may
look like

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-listener]
--------------------------------------------------
<1> `onResponse` is called back when the action is completed successfully
<2> `onFailure` is called back when some unexpected error occurs

[[java-rest-high-x-pack-ml-flush-job-response]]
[id="{upid}-{api}-response"]
==== Flush Job Response

A `FlushJobResponse` contains an acknowledgement and an optional end date for the
A +{response}+ contains an acknowledgement and an optional end date for the
last finalized bucket

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-response]
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> `isFlushed()` indicates if the job was successfully flushed or not.
<2> `getLastFinalizedBucketEnd()` provides the timestamp
(in milliseconds-since-the-epoch) of the end of the last bucket that was processed.
(in milliseconds-since-the-epoch) of the end of the last bucket that was processed.

include::../execution.asciidoc[]

@@ -1,20 +1,25 @@
[[java-rest-high-x-pack-ml-forecast-job]]
--
:api: forecast-job
:request: ForecastJobRequest
:response: ForecastJobResponse
--
[id="{upid}-{api}"]
=== Forecast Job API

The Forecast Job API provides the ability to forecast a {ml} job's behavior based
on historical data.
It accepts a `ForecastJobRequest` object and responds
with a `ForecastJobResponse` object.
It accepts a +{request}+ object and responds
with a +{response}+ object.

[[java-rest-high-x-pack-ml-forecast-job-request]]
[id="{upid}-{api}-request"]
==== Forecast Job Request

A `ForecastJobRequest` object gets created with an existing non-null `jobId`.
A +{request}+ object gets created with an existing non-null `jobId`.
All other fields are optional for the request.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-request]
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new request referencing an existing `jobId`

@@ -24,53 +29,21 @@ The following arguments are optional.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-request-options]
include-tagged::{doc-tests-file}[{api}-request-options]
--------------------------------------------------
<1> Set when the forecast for the job should expire
<2> Set how far into the future the forecast should predict

[[java-rest-high-x-pack-ml-forecast-job-execution]]
==== Execution

The request can be executed through the `MachineLearningClient` contained
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-execute]
--------------------------------------------------

[[java-rest-high-x-pack-ml-forecast-job-execution-async]]
==== Asynchronous Execution

The request can also be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-execute-async]
--------------------------------------------------
<1> The `ForecastJobRequest` to execute and the `ActionListener` to use when
the execution completes

The method does not block and returns immediately. The passed `ActionListener` is used
to notify the caller of completion. A typical `ActionListener` for `ForecastJobResponse` may
look like

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-listener]
--------------------------------------------------
<1> `onResponse` is called back when the action is completed successfully
<2> `onFailure` is called back when some unexpected error occurs

[[java-rest-high-x-pack-ml-forecast-job-response]]
[id="{upid}-{api}-response"]
==== Forecast Job Response

A `ForecastJobResponse` contains an acknowledgement and the forecast ID
A +{response}+ contains an acknowledgement and the forecast ID

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-response]
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> `isAcknowledged()` indicates if the forecast was successful
<2> `getForecastId()` provides the ID of the forecast that was created
<2> `getForecastId()` provides the ID of the forecast that was created

include::../execution.asciidoc[]

@@ -1,18 +1,23 @@
[[java-rest-high-x-pack-ml-get-buckets]]
--
:api: get-buckets
:request: GetBucketsRequest
:response: GetBucketsResponse
--
[id="{upid}-{api}"]
=== Get Buckets API

The Get Buckets API retrieves one or more bucket results.
It accepts a `GetBucketsRequest` object and responds
with a `GetBucketsResponse` object.
It accepts a +{request}+ object and responds
with a +{response}+ object.

[[java-rest-high-x-pack-ml-get-buckets-request]]
[id="{upid}-{api}-request"]
==== Get Buckets Request

A `GetBucketsRequest` object gets created with an existing non-null `jobId`.
A +{request}+ object gets created with an existing non-null `jobId`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-request]
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new request referencing an existing `jobId`

@@ -21,105 +26,69 @@ The following arguments are optional:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-timestamp]
include-tagged::{doc-tests-file}[{api}-timestamp]
--------------------------------------------------
<1> The timestamp of the bucket to get. Otherwise it will return all buckets.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-anomaly-score]
include-tagged::{doc-tests-file}[{api}-anomaly-score]
--------------------------------------------------
<1> Buckets with anomaly scores greater than or equal to this value will be returned.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-desc]
include-tagged::{doc-tests-file}[{api}-desc]
--------------------------------------------------
<1> If `true`, the buckets are sorted in descending order. Defaults to `false`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-end]
include-tagged::{doc-tests-file}[{api}-end]
--------------------------------------------------
<1> Buckets with timestamps earlier than this time will be returned.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-exclude-interim]
include-tagged::{doc-tests-file}[{api}-exclude-interim]
--------------------------------------------------
<1> If `true`, interim results will be excluded. Defaults to `false`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-expand]
include-tagged::{doc-tests-file}[{api}-expand]
--------------------------------------------------
<1> If `true`, buckets will include their anomaly records. Defaults to `false`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-page]
include-tagged::{doc-tests-file}[{api}-page]
--------------------------------------------------
<1> The page parameters `from` and `size`. `from` specifies the number of buckets to skip.
`size` specifies the maximum number of buckets to get. Defaults to `0` and `100` respectively.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-sort]
include-tagged::{doc-tests-file}[{api}-sort]
--------------------------------------------------
<1> The field to sort buckets on. Defaults to `timestamp`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-start]
include-tagged::{doc-tests-file}[{api}-start]
--------------------------------------------------
<1> Buckets with timestamps on or after this time will be returned.
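As a rough, untested sketch of combining the optional arguments listed above; the job id and values are hypothetical, and the setter names follow the 6.x high-level client and may differ in other versions:

["source","java"]
--------------------------------------------------
// Setter names follow the 6.x high-level client and may differ in other versions; values are hypothetical.
GetBucketsRequest getBucketsRequest = new GetBucketsRequest("my-job-id");
getBucketsRequest.setAnomalyScore(75.0);                   // only buckets with an anomaly score >= 75
getBucketsRequest.setDescending(true);                     // sort in descending order
getBucketsRequest.setExcludeInterim(true);                 // leave out interim results
getBucketsRequest.setExpand(true);                         // include the anomaly records of each bucket
getBucketsRequest.setPageParams(new PageParams(100, 200)); // skip the first 100 buckets, return at most 200
getBucketsRequest.setSort("anomaly_score");                // sort on the anomaly_score field
getBucketsRequest.setStart("2018-08-17T00:00:00Z");        // buckets on or after this time
getBucketsRequest.setEnd("2018-08-18T00:00:00Z");          // buckets before this time
--------------------------------------------------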
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-buckets-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-buckets-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetBucketsRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `GetBucketsResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
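
Since the listener body above is pulled in from the test sources, here is a standalone sketch of what
such a listener can look like. The accessor names `count()` and `buckets()` are assumptions based on the
response callouts later in this section.

["source","java"]
--------------------------------------------------
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.ml.GetBucketsResponse;

public class GetBucketsListenerExample {
    public static ActionListener<GetBucketsResponse> listener() {
        return new ActionListener<GetBucketsResponse>() {
            @Override
            public void onResponse(GetBucketsResponse response) {
                // Called on success: inspect the matched count and the buckets themselves.
                System.out.println(response.count() + " buckets matched");
                response.buckets().forEach(System.out::println);
            }

            @Override
            public void onFailure(Exception e) {
                // Called when the request could not be executed or failed on the server.
                e.printStackTrace();
            }
        };
    }
}
--------------------------------------------------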
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-buckets-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Buckets Response
|
||||
|
||||
The returned `GetBucketsResponse` contains the requested buckets:
|
||||
The returned +{response}+ contains the requested buckets:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The count of buckets that were matched
|
||||
<2> The buckets retrieved
|
|
@ -1,83 +1,53 @@
|
|||
[[java-rest-high-x-pack-ml-get-calendars]]
|
||||
--
|
||||
:api: get-calendars
|
||||
:request: GetCalendarsRequest
|
||||
:response: GetCalendarsResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Calendars API
|
||||
Retrieves one or more calendar objects.
|
||||
It accepts a `GetCalendarsRequest` and responds
|
||||
with a `GetCalendarsResponse` object.
|
||||
It accepts a +{request}+ and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-calendars-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Calendars Request
|
||||
|
||||
By default a `GetCalendarsRequest` with no calendar Id set will return all
|
||||
By default a +{request}+ with no calendar Id set will return all
|
||||
calendars. Using the literal `_all` also returns all calendars.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request for all calendars
|
||||
|
||||
|
||||
==== Optional Arguments
|
||||
The following arguments are optional:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-id]
|
||||
include-tagged::{doc-tests-file}[{api}-id]
|
||||
--------------------------------------------------
|
||||
<1> Construct a request for the single calendar `holidays`
|
||||
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-page]
|
||||
include-tagged::{doc-tests-file}[{api}-page]
|
||||
--------------------------------------------------
|
||||
<1> The page parameters `from` and `size`. `from` specifies the number of calendars to skip.
|
||||
`size` specifies the maximum number of calendars to get. Defaults to `0` and `100` respectively.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-calendars-execution]]
|
||||
==== Execution
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-execution]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-calendars-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetCalendarsRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `GetCalendarsResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-calendars-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Calendars Response
|
||||
|
||||
The returned `GetCalendarsResponse` contains the requested calendars:
|
||||
The returned +{response}+ contains the requested calendars:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The count of calendars that were matched
|
||||
<2> The calendars retrieved
|
|
@ -1,18 +1,23 @@
|
|||
[[java-rest-high-x-pack-ml-get-categories]]
|
||||
--
|
||||
:api: get-categories
|
||||
:request: GetCategoriesRequest
|
||||
:response: GetCategoriesResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Categories API
|
||||
|
||||
The Get Categories API retrieves one or more category results.
|
||||
It accepts a `GetCategoriesRequest` object and responds
|
||||
with a `GetCategoriesResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-categories-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Categories Request
|
||||
|
||||
A `GetCategoriesRequest` object gets created with an existing non-null `jobId`.
|
||||
A +{request}+ object gets created with an existing non-null `jobId`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing an existing `jobId`
|
||||
|
||||
|
@ -21,63 +26,27 @@ The following arguments are optional:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-category-id]
|
||||
include-tagged::{doc-tests-file}[{api}-category-id]
|
||||
--------------------------------------------------
|
||||
<1> The ID of the category to get. If not set, all categories are returned.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-page]
|
||||
include-tagged::{doc-tests-file}[{api}-page]
|
||||
--------------------------------------------------
|
||||
<1> The page parameters `from` and `size`. `from` specifies the number of categories to skip.
|
||||
`size` specifies the maximum number of categories to get. Defaults to `0` and `100` respectively.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-categories-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-categories-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetCategoriesRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `GetCategoriesResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-categories-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Categories Response
|
||||
|
||||
The returned `GetCategoriesResponse` contains the requested categories:
|
||||
The returned +{response}+ contains the requested categories:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The count of categories that were matched
|
||||
<2> The categories retrieved
|
|
@ -0,0 +1,40 @@
|
|||
--
|
||||
:api: get-datafeed-stats
|
||||
:request: GetDatafeedStatsRequest
|
||||
:response: GetDatafeedStatsResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Datafeed Stats API
|
||||
|
||||
The Get Datafeed Stats API provides the ability to get any number of
|
||||
{ml} datafeeds' statistics in the cluster.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Datafeed Stats Request
|
||||
|
||||
A +{request}+ object can have any number of `datafeedId`
|
||||
entries. However, they all must be non-null. An empty list is the same as
|
||||
requesting statistics for all datafeeds.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing existing `datafeedIds`, which can contain wildcards
|
||||
<2> Whether to ignore if a wildcard expression matches no datafeeds.
|
||||
(This includes `_all` string or when no datafeeds have been specified)
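
A minimal sketch of building such a request, assuming a varargs constructor and a `setAllowNoDatafeeds`
setter (the names are inferred from the callouts and may differ slightly in your client version):

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;

public class GetDatafeedStatsExample {
    public static GetDatafeedStatsRequest buildRequest() {
        // Statistics for any datafeed matching the wildcard plus one explicit id
        // (both ids are placeholders).
        GetDatafeedStatsRequest request =
                new GetDatafeedStatsRequest("datafeed-metrics-*", "datafeed-total-requests");
        // Don't fail if the wildcard (or "_all") matches no datafeeds.
        request.setAllowNoDatafeeds(true);
        return request;
    }
}
--------------------------------------------------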
|
||||
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Datafeed Stats Response
|
||||
The returned +{response}+ contains the requested datafeed statistics:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> `count()` indicates the number of datafeed statistics found
|
||||
<2> `datafeedStats()` is the collection of {ml} `DatafeedStats` objects found
|
|
@ -1,56 +1,37 @@
|
|||
[[java-rest-high-x-pack-ml-get-datafeed]]
|
||||
--
|
||||
:api: get-datafeed
|
||||
:request: GetDatafeedRequest
|
||||
:response: GetDatafeedResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Datafeed API
|
||||
|
||||
The Get Datafeed API provides the ability to get {ml} datafeeds in the cluster.
|
||||
It accepts a `GetDatafeedRequest` object and responds
|
||||
with a `GetDatafeedResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-datafeed-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Datafeed Request
|
||||
|
||||
A `GetDatafeedRequest` object can have any number of `datafeedId` entries.
|
||||
A +{request}+ object can have any number of `datafeedId` entries.
|
||||
However, they all must be non-null. An empty list is the same as requesting all datafeeds.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing existing `datafeedIds`, which can contain wildcards
|
||||
<2> Whether to ignore if a wildcard expression matches no datafeeds.
|
||||
(This includes `_all` string or when no datafeeds have been specified)
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-datafeed-execution]]
|
||||
==== Execution
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Datafeed Response
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-execute]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The count of retrieved datafeeds
|
||||
<2> The retrieved datafeeds
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-datafeed-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetDatafeedRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `GetDatafeedResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
include::../execution.asciidoc[]
|
||||
|
|
|
@ -1,18 +1,23 @@
|
|||
[[java-rest-high-x-pack-ml-get-influencers]]
|
||||
--
|
||||
:api: get-influencers
|
||||
:request: GetInfluencersRequest
|
||||
:response: GetInfluencersResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Influencers API
|
||||
|
||||
The Get Influencers API retrieves one or more influencer results.
|
||||
It accepts a `GetInfluencersRequest` object and responds
|
||||
with a `GetInfluencersResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-influencers-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Influencers Request
|
||||
|
||||
A `GetInfluencersRequest` object gets created with an existing non-null `jobId`.
|
||||
A +{request}+ object gets created with an existing non-null `jobId`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing an existing `jobId`
|
||||
|
||||
|
@ -21,92 +26,57 @@ The following arguments are optional:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-desc]
|
||||
include-tagged::{doc-tests-file}[{api}-desc]
|
||||
--------------------------------------------------
|
||||
<1> If `true`, the influencers are sorted in descending order. Defaults to `false`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-end]
|
||||
include-tagged::{doc-tests-file}[{api}-end]
|
||||
--------------------------------------------------
|
||||
<1> Influencers with timestamps earlier than this time will be returned.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-exclude-interim]
|
||||
include-tagged::{doc-tests-file}[{api}-exclude-interim]
|
||||
--------------------------------------------------
|
||||
<1> If `true`, interim results will be excluded. Defaults to `false`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-influencer-score]
|
||||
include-tagged::{doc-tests-file}[{api}-influencer-score]
|
||||
--------------------------------------------------
|
||||
<1> Influencers with influencer_score greater than or equal to this value will be returned.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-page]
|
||||
include-tagged::{doc-tests-file}[{api}-page]
|
||||
--------------------------------------------------
|
||||
<1> The page parameters `from` and `size`. `from` specifies the number of influencers to skip.
|
||||
`size` specifies the maximum number of influencers to get. Defaults to `0` and `100` respectively.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-sort]
|
||||
include-tagged::{doc-tests-file}[{api}-sort]
|
||||
--------------------------------------------------
|
||||
<1> The field to sort influencers on. Defaults to `influencer_score`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-start]
|
||||
include-tagged::{doc-tests-file}[{api}-start]
|
||||
--------------------------------------------------
|
||||
<1> Influencers with timestamps on or after this time will be returned.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-influencers-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-influencers-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetInfluencersRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `GetInfluencersResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-influencers-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Influencers Response
|
||||
|
||||
The returned `GetInfluencersResponse` contains the requested influencers:
|
||||
The returned +{response}+ contains the requested influencers:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The count of influencers that were matched
|
||||
<2> The influencers retrieved
|
|
@ -1,12 +1,17 @@
|
|||
[[java-rest-high-x-pack-ml-get-job-stats]]
|
||||
--
|
||||
:api: get-job-stats
|
||||
:request: GetJobStatsRequest
|
||||
:response: GetJobStatsResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Job Stats API
|
||||
|
||||
The Get Job Stats API provides the ability to get any number of
|
||||
{ml} jobs' statistics in the cluster.
|
||||
It accepts a `GetJobStatsRequest` object and responds
|
||||
with a `GetJobStatsResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-job-stats-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Job Stats Request
|
||||
|
||||
A `GetJobStatsRequest` object can have any number of `jobId`
|
||||
|
@ -15,53 +20,21 @@ requesting statistics for all jobs.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing existing `jobIds`, which can contain wildcards
|
||||
<2> Whether to ignore if a wildcard expression matches no jobs.
|
||||
(This includes `_all` string or when no jobs have been specified)
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-job-stats-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-job-stats-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetJobsStatsRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `GetJobsStatsResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-job-stats-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Job Stats Response
|
||||
The returned `GetJobStatsResponse` contains the requested job statistics:
|
||||
The returned +{response}+ contains the requested job statistics:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> `getCount()` indicates the number of job statistics found
|
||||
<2> `getJobStats()` is the collection of {ml} `JobStats` objects found
|
|
@ -1,57 +1,38 @@
|
|||
[[java-rest-high-x-pack-ml-get-job]]
|
||||
--
|
||||
:api: get-job
|
||||
:request: GetJobRequest
|
||||
:response: GetJobResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Job API
|
||||
|
||||
The Get Job API provides the ability to get {ml} jobs in the cluster.
|
||||
It accepts a `GetJobRequest` object and responds
|
||||
with a `GetJobResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-job-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Job Request
|
||||
|
||||
A `GetJobRequest` object can have any number of `jobId` or `groupName`
|
||||
A +{request}+ object can have any number of `jobId` or `groupName`
|
||||
entries. However, they all must be non-null. An empty list is the same as
|
||||
requesting all jobs.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing existing `jobIds`, which can contain wildcards
|
||||
<2> Whether to ignore if a wildcard expression matches no jobs.
|
||||
(This includes `_all` string or when no jobs have been specified)
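
For example, a request that mixes an explicit job id with a group wildcard might look like the sketch
below. The `setAllowNoJobs` name is an assumption based on callout <2>; consult the included snippet for
the exact setter.

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.ml.GetJobRequest;

public class GetJobExample {
    public static GetJobRequest buildRequest() {
        // Job ids and group names can be mixed and may contain wildcards (placeholders here).
        GetJobRequest request = new GetJobRequest("total-requests", "metric-jobs-*");
        // Do not fail the request if the wildcard (or "_all") matches no jobs.
        request.setAllowNoJobs(true);
        return request;
    }
}
--------------------------------------------------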
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-job-execution]]
|
||||
==== Execution
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Job Response
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-execute]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> `getCount()` from the `GetJobResponse` indicates the number of jobs found
|
||||
<1> `getCount()` from the +{response}+ indicates the number of jobs found
|
||||
<2> `getJobs()` is the collection of {ml} `Job` objects found
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-job-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetJobRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `GetJobResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
include::../execution.asciidoc[]
|
||||
|
|
|
@ -1,19 +1,24 @@
|
|||
[[java-rest-high-x-pack-ml-get-overall-buckets]]
|
||||
--
|
||||
:api: get-overall-buckets
|
||||
:request: GetOverallBucketsRequest
|
||||
:response: GetOverallBucketsResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Overall Buckets API
|
||||
|
||||
The Get Overall Buckets API retrieves overall bucket results that
|
||||
summarize the bucket results of multiple jobs.
|
||||
It accepts a `GetOverallBucketsRequest` object and responds
|
||||
with a `GetOverallBucketsResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-overall-buckets-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Overall Buckets Request
|
||||
|
||||
A `GetOverallBucketsRequest` object gets created with one or more `jobId`.
|
||||
A +{request}+ object gets created with one or more `jobId`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing job IDs `jobId1` and `jobId2`.
|
||||
|
||||
|
@ -22,86 +27,51 @@ The following arguments are optional:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-bucket-span]
|
||||
include-tagged::{doc-tests-file}[{api}-bucket-span]
|
||||
--------------------------------------------------
|
||||
<1> The span of the overall buckets. Must be greater than or equal to the jobs' largest `bucket_span`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-end]
|
||||
include-tagged::{doc-tests-file}[{api}-end]
|
||||
--------------------------------------------------
|
||||
<1> Overall buckets with timestamps earlier than this time will be returned.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-exclude-interim]
|
||||
include-tagged::{doc-tests-file}[{api}-exclude-interim]
|
||||
--------------------------------------------------
|
||||
<1> If `true`, interim results will be excluded. Overall buckets are interim if any of the job buckets
|
||||
within the overall bucket interval are interim. Defaults to `false`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-overall-score]
|
||||
include-tagged::{doc-tests-file}[{api}-overall-score]
|
||||
--------------------------------------------------
|
||||
<1> Overall buckets with overall scores greater than or equal to this value will be returned.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-start]
|
||||
include-tagged::{doc-tests-file}[{api}-start]
|
||||
--------------------------------------------------
|
||||
<1> Overall buckets with timestamps on or after this time will be returned.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-top-n]
|
||||
include-tagged::{doc-tests-file}[{api}-top-n]
|
||||
--------------------------------------------------
|
||||
<1> The number of top job bucket scores to be used in the `overall_score` calculation. Defaults to `1`.
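
Putting several of these options together, a request spanning two jobs could be sketched as follows.
Setter names such as `setBucketSpan` and `setTopN` are assumptions based on the callouts, and `TimeValue`
is the standard Elasticsearch time unit helper.

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.common.unit.TimeValue;

public class GetOverallBucketsExample {
    public static GetOverallBucketsRequest buildRequest() {
        // Summarize results across two jobs (ids are placeholders).
        GetOverallBucketsRequest request = new GetOverallBucketsRequest("jobId1", "jobId2");
        request.setBucketSpan(TimeValue.timeValueHours(24)); // >= the largest job bucket_span
        request.setTopN(2);               // use the top 2 job bucket scores per overall bucket
        request.setOverallScore(80.0);    // only overall buckets scoring >= 80
        request.setExcludeInterim(true);  // skip interim results
        return request;
    }
}
--------------------------------------------------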
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-overall-buckets-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-overall-buckets-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetOverallBucketsRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `GetOverallBucketsResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-overall-buckets-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Overall Buckets Response
|
||||
|
||||
The returned `GetOverallBucketsResponse` contains the requested buckets:
|
||||
The returned +{response}+ contains the requested buckets:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The count of overall buckets that were matched
|
||||
<2> The overall buckets retrieved
|
|
@ -1,18 +1,23 @@
|
|||
[[java-rest-high-x-pack-ml-get-records]]
|
||||
--
|
||||
:api: get-records
|
||||
:request: GetRecordsRequest
|
||||
:response: GetRecordsResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Records API
|
||||
|
||||
The Get Records API retrieves one or more record results.
|
||||
It accepts a `GetRecordsRequest` object and responds
|
||||
with a `GetRecordsResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-records-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Get Records Request
|
||||
|
||||
A `GetRecordsRequest` object gets created with an existing non-null `jobId`.
|
||||
A +{request}+ object gets created with an existing non-null `jobId`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing an existing `jobId`
|
||||
|
||||
|
@ -21,93 +26,57 @@ The following arguments are optional:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-desc]
|
||||
include-tagged::{doc-tests-file}[{api}-desc]
|
||||
--------------------------------------------------
|
||||
<1> If `true`, the records are sorted in descending order. Defaults to `false`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-end]
|
||||
include-tagged::{doc-tests-file}[{api}-end]
|
||||
--------------------------------------------------
|
||||
<1> Records with timestamps earlier than this time will be returned.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-exclude-interim]
|
||||
include-tagged::{doc-tests-file}[{api}-exclude-interim]
|
||||
--------------------------------------------------
|
||||
<1> If `true`, interim results will be excluded. Defaults to `false`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-page]
|
||||
include-tagged::{doc-tests-file}[{api}-page]
|
||||
--------------------------------------------------
|
||||
<1> The page parameters `from` and `size`. `from` specifies the number of records to skip.
|
||||
`size` specifies the maximum number of records to get. Defaults to `0` and `100` respectively.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-record-score]
|
||||
include-tagged::{doc-tests-file}[{api}-record-score]
|
||||
--------------------------------------------------
|
||||
<1> Records with record_score greater than or equal to this value will be returned.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-sort]
|
||||
include-tagged::{doc-tests-file}[{api}-sort]
|
||||
--------------------------------------------------
|
||||
<1> The field to sort records on. Defaults to `record_score`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-start]
|
||||
include-tagged::{doc-tests-file}[{api}-start]
|
||||
--------------------------------------------------
|
||||
<1> Records with timestamps on or after this time will be returned.
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-records-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-records-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetRecordsRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `GetRecordsResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-get-records-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Get Records Response
|
||||
|
||||
The returned `GetRecordsResponse` contains the requested records:
|
||||
The returned +{response}+ contains the requested records:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The count of records that were matched
|
||||
<2> The records retrieved
|
|
@ -1,55 +1,36 @@
|
|||
[[java-rest-high-x-pack-ml-open-job]]
|
||||
--
|
||||
:api: open-job
|
||||
:request: OpenJobRequest
|
||||
:response: OpenJobResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Open Job API
|
||||
|
||||
The Open Job API provides the ability to open {ml} jobs in the cluster.
|
||||
It accepts an `OpenJobRequest` object and responds
|
||||
with an `OpenJobResponse` object.
|
||||
It accepts an +{request}+ object and responds
|
||||
with an +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-open-job-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Open Job Request
|
||||
|
||||
An `OpenJobRequest` object gets created with an existing non-null `jobId`.
|
||||
An +{request}+ object gets created with an existing non-null `jobId`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing an existing `jobId`
|
||||
<2> Optionally setting the `timeout` value for how long the
|
||||
execution should wait for the job to be opened.
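
A short sketch of the request described above, assuming the timeout is expressed as a `TimeValue` (the
job id is a placeholder):

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.common.unit.TimeValue;

public class OpenJobExample {
    public static OpenJobRequest buildRequest() {
        OpenJobRequest request = new OpenJobRequest("my-first-machine-learning-job");
        // Wait up to two minutes for the job to open before the call gives up.
        request.setTimeout(TimeValue.timeValueMinutes(2));
        return request;
    }
}
--------------------------------------------------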
|
||||
|
||||
[[java-rest-high-x-pack-ml-open-job-execution]]
|
||||
==== Execution
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Open Job Response
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-execute]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> `isOpened()` from the `OpenJobResponse` indicates if the job was successfully
|
||||
<1> `isOpened()` from the +{response}+ indicates if the job was successfully
|
||||
opened or not.
|
||||
|
||||
[[java-rest-high-x-pack-ml-open-job-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `OpenJobRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `OpenJobResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
include::../execution.asciidoc[]
|
|
@ -1,27 +1,32 @@
|
|||
[[java-rest-high-x-pack-ml-post-data]]
|
||||
--
|
||||
:api: post-data
|
||||
:request: PostDataRequest
|
||||
:response: PostDataResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Post Data API
|
||||
|
||||
The Post Data API provides the ability to post data to an open
|
||||
{ml} job in the cluster.
|
||||
It accepts a `PostDataRequest` object and responds
|
||||
with a `PostDataResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-post-data-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Post Data Request
|
||||
|
||||
A `PostDataRequest` object gets created with an existing non-null `jobId`
|
||||
A +{request}+ object gets created with an existing non-null `jobId`
|
||||
and the `XContentType` being sent. Individual docs can be added
|
||||
incrementally via the `PostDataRequest.JsonBuilder#addDoc` method.
|
||||
These are then serialized and sent in bulk when passed to the `PostDataRequest`.
|
||||
These are then serialized and sent in bulk when passed to the +{request}+.
|
||||
|
||||
Alternatively, the serialized bulk content can be set manually, along with its `XContentType`
|
||||
through one of the other `PostDataRequest` constructors.
|
||||
through one of the other +{request}+ constructors.
|
||||
|
||||
Only `XContentType.JSON` and `XContentType.SMILE` are supported.
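
To make the flow concrete, here is a hedged sketch of building up documents with the `JsonBuilder` and
wrapping them in a request. It assumes a `PostDataRequest(String, JsonBuilder)` constructor exists
alongside the raw-content constructors mentioned above; the included test snippet shows the exact form.

["source","java"]
--------------------------------------------------
import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.client.ml.PostDataRequest;

public class PostDataExample {
    public static PostDataRequest buildRequest() {
        PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();

        // Each document is added as a Map and serialized to JSON by the builder.
        Map<String, Object> doc = new HashMap<>();
        doc.put("timestamp", 1533081600000L); // epoch millis, matching the job's data description
        doc.put("total", 42);
        jsonBuilder.addDoc(doc);

        // The builder's accumulated documents are sent in bulk to the open job (id is a placeholder).
        return new PostDataRequest("my-open-job-id", jsonBuilder);
    }
}
--------------------------------------------------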
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Create a new `PostDataRequest.JsonBuilder` object for incrementally adding documents
|
||||
<2> Add a new document as a `Map<String, Object>` object
|
||||
|
@ -34,53 +39,21 @@ The following arguments are optional.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-request-options]
|
||||
include-tagged::{doc-tests-file}[{api}-request-options]
|
||||
--------------------------------------------------
|
||||
<1> Set the start of the bucket resetting time
|
||||
<2> Set the end of the bucket resetting time
|
||||
|
||||
[[java-rest-high-x-pack-ml-post-data-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-post-data-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `PostDataRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `PostDataResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-post-data-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Post Data Response
|
||||
|
||||
A `PostDataResponse` contains current data processing statistics.
|
||||
A +{response}+ contains current data processing statistics.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> `getDataCounts()` returns a `DataCounts` object containing the current
|
||||
data processing counts.
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
--
|
||||
:api: preview-datafeed
|
||||
:request: PreviewDatafeedRequest
|
||||
:response: PreviewDatafeedResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Preview Datafeed API
|
||||
|
||||
The Preview Datafeed API provides the ability to preview a {ml} datafeed's data
|
||||
in the cluster. It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Preview Datafeed Request
|
||||
|
||||
A +{request}+ object is created referencing a non-null `datafeedId`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing an existing `datafeedId`
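
The request itself is essentially a one-liner; a minimal sketch, with the datafeed id as a placeholder:

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.ml.PreviewDatafeedRequest;

public class PreviewDatafeedExample {
    public static PreviewDatafeedRequest buildRequest() {
        // Preview the data that the existing datafeed would fetch for its job.
        return new PreviewDatafeedRequest("datafeed-total-requests");
    }
}
--------------------------------------------------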
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Preview Datafeed Response
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The raw +BytesReference+ of the data preview
|
||||
<2> A +List<Map<String,Object>>+ that represents the previewed data
|
||||
|
||||
include::../execution.asciidoc[]
|
|
@ -1,65 +1,35 @@
|
|||
[[java-rest-high-x-pack-ml-put-calendar]]
|
||||
--
|
||||
:api: put-calendar
|
||||
:request: PutCalendarRequest
|
||||
:response: PutCalendarResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Put Calendar API
|
||||
Creates a new {ml} calendar.
|
||||
The API accepts a `PutCalendarRequest` and responds
|
||||
with a `PutCalendarResponse` object.
|
||||
The API accepts a +{request}+ and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-calendar-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Put Calendar Request
|
||||
|
||||
A `PutCalendarRequest` is constructed with a Calendar object
|
||||
A +{request}+ is constructed with a `Calendar` object
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Create a request with the given Calendar
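
As a rough sketch, constructing the `Calendar` and wrapping it in a request might look like this. The
`Calendar(String, List<String>, String)` constructor shape is an assumption; the included snippet is
authoritative.

["source","java"]
--------------------------------------------------
import java.util.Arrays;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.calendars.Calendar;

public class PutCalendarExample {
    public static PutCalendarRequest buildRequest() {
        // A calendar with an id, the jobs it applies to, and a description (all placeholders).
        Calendar calendar = new Calendar("public_holidays",
                Arrays.asList("job-1", "job-2"),
                "Skip anomaly detection on public holidays");
        return new PutCalendarRequest(calendar);
    }
}
--------------------------------------------------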
|
||||
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-calendar-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Put Calendar Response
|
||||
|
||||
The returned `PutCalendarResponse` contains the created Calendar:
|
||||
The returned +{response}+ contains the created Calendar:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The created Calendar
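A compact sketch of the whole flow (assuming a configured `RestHighLevelClient` named `client`; the calendar and job IDs are illustrative, and this is not one of the tested snippets):

["source","java"]
--------------------------------------------------
// Sketch only: create a calendar that applies to one job.
Calendar calendar = new Calendar("holiday-calendar",
    Collections.singletonList("total-requests"), "Bank holidays");
PutCalendarRequest request = new PutCalendarRequest(calendar);
PutCalendarResponse response =
    client.machineLearning().putCalendar(request, RequestOptions.DEFAULT);
Calendar created = response.getCalendar(); // the calendar as stored by the cluster
--------------------------------------------------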
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-calendar-execution]]
|
||||
==== Execution
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-calendar-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `PutCalendarRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `PutCalendarResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
include::../execution.asciidoc[]
|
||||
|
|
|
@ -1,22 +1,27 @@
|
|||
[[java-rest-high-x-pack-ml-put-datafeed]]
|
||||
--
|
||||
:api: put-datafeed
|
||||
:request: PutDatafeedRequest
|
||||
:response: PutDatafeedResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Put Datafeed API
|
||||
|
||||
The Put Datafeed API can be used to create a new {ml} datafeed
|
||||
in the cluster. The API accepts a `PutDatafeedRequest` object
|
||||
as a request and returns a `PutDatafeedResponse`.
|
||||
in the cluster. The API accepts a +{request}+ object
|
||||
as a request and returns a +{response}+.
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-datafeed-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Put Datafeed Request
|
||||
|
||||
A `PutDatafeedRequest` requires the following argument:
|
||||
A +{request}+ requires the following argument:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> The configuration of the {ml} datafeed to create
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-datafeed-config]]
|
||||
[id="{upid}-{api}-config"]
|
||||
==== Datafeed Configuration
|
||||
|
||||
The `DatafeedConfig` object contains all the details about the {ml} datafeed
|
||||
|
@ -26,7 +31,7 @@ A `DatafeedConfig` requires the following arguments:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config]
|
||||
include-tagged::{doc-tests-file}[{api}-config]
|
||||
--------------------------------------------------
|
||||
<1> The datafeed ID and the job ID
|
||||
<2> The indices that contain the data to retrieve and feed into the job
|
||||
|
@ -36,89 +41,52 @@ The following arguments are optional:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-chunking-config]
|
||||
include-tagged::{doc-tests-file}[{api}-config-set-chunking-config]
|
||||
--------------------------------------------------
|
||||
<1> Specifies how data searches are split into time chunks.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-frequency]
|
||||
include-tagged::{doc-tests-file}[{api}-config-set-frequency]
|
||||
--------------------------------------------------
|
||||
<1> The interval at which scheduled queries are made while the datafeed runs in real time.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-query]
|
||||
include-tagged::{doc-tests-file}[{api}-config-set-query]
|
||||
--------------------------------------------------
|
||||
<1> A query to filter the search results by. Defaults to the `match_all` query.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-query-delay]
|
||||
include-tagged::{doc-tests-file}[{api}-config-set-query-delay]
|
||||
--------------------------------------------------
|
||||
<1> The time interval behind real time that data is queried.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-script-fields]
|
||||
include-tagged::{doc-tests-file}[{api}-config-set-script-fields]
|
||||
--------------------------------------------------
|
||||
<1> Allows the use of script fields.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-scroll-size]
|
||||
include-tagged::{doc-tests-file}[{api}-config-set-scroll-size]
|
||||
--------------------------------------------------
|
||||
<1> The `size` parameter used in the searches.
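Putting the configuration options together, a minimal sketch of creating a datafeed (assuming a configured `RestHighLevelClient` named `client`; the IDs and index name are illustrative, and this is not one of the tested snippets):

["source","java"]
--------------------------------------------------
// Sketch only: build a DatafeedConfig with a couple of optional settings and create it.
DatafeedConfig.Builder datafeedBuilder =
    new DatafeedConfig.Builder("datafeed-total-requests", "total-requests") // datafeed ID and job ID
        .setIndices(Collections.singletonList("server-metrics"))            // source index
        .setFrequency(TimeValue.timeValueSeconds(30))                       // query interval in real time
        .setQueryDelay(TimeValue.timeValueMinutes(1));                      // lag behind real time
PutDatafeedRequest request = new PutDatafeedRequest(datafeedBuilder.build());
PutDatafeedResponse response =
    client.machineLearning().putDatafeed(request, RequestOptions.DEFAULT);
DatafeedConfig created = response.getResponse(); // includes defaults filled in by the server
--------------------------------------------------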
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-datafeed-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The Put Datafeed API can be executed through a `MachineLearningClient`
|
||||
instance. Such an instance can be retrieved from a `RestHighLevelClient`
|
||||
using the `machineLearning()` method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-datafeed-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned `PutDatafeedResponse` returns the full representation of
|
||||
The returned +{response}+ contains the full representation of
|
||||
the new {ml} datafeed if it has been successfully created. This will
|
||||
contain the creation time and other fields initialized using
|
||||
default values:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The created datafeed
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-datafeed-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `PutDatafeedRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `PutDatafeedResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of failure. The raised exception is provided as an argument
|
||||
|
|
|
@ -1,22 +1,27 @@
|
|||
[[java-rest-high-x-pack-ml-put-job]]
|
||||
--
|
||||
:api: put-job
|
||||
:request: PutJobRequest
|
||||
:response: PutJobResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Put Job API
|
||||
|
||||
The Put Job API can be used to create a new {ml} job
|
||||
in the cluster. The API accepts a `PutJobRequest` object
|
||||
as a request and returns a `PutJobResponse`.
|
||||
in the cluster. The API accepts a +{request}+ object
|
||||
as a request and returns a +{response}+.
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-job-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Put Job Request
|
||||
|
||||
A `PutJobRequest` requires the following argument:
|
||||
A +{request}+ requires the following argument:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> The configuration of the {ml} job to create as a `Job`
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-job-config]]
|
||||
[id="{upid}-{api}-config"]
|
||||
==== Job Configuration
|
||||
|
||||
The `Job` object contains all the details about the {ml} job
|
||||
|
@ -26,14 +31,14 @@ A `Job` requires the following arguments:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-config]
|
||||
include-tagged::{doc-tests-file}[{api}-config]
|
||||
--------------------------------------------------
|
||||
<1> The job ID
|
||||
<2> An analysis configuration
|
||||
<3> A data description
|
||||
<4> Optionally, a human-readable description
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-job-analysis-config]]
|
||||
[id="{upid}-{api}-analysis-config"]
|
||||
==== Analysis Configuration
|
||||
|
||||
The analysis configuration of the {ml} job is defined in the `AnalysisConfig`.
|
||||
|
@ -64,7 +69,7 @@ An example of building a `Detector` instance is as follows:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-detector]
|
||||
include-tagged::{doc-tests-file}[{api}-detector]
|
||||
--------------------------------------------------
|
||||
<1> The function to use
|
||||
<2> The field to apply the function to
|
||||
|
@ -74,13 +79,13 @@ Then the same configuration would be:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-analysis-config]
|
||||
include-tagged::{doc-tests-file}[{api}-analysis-config]
|
||||
--------------------------------------------------
|
||||
<1> Create a list of detectors
|
||||
<2> Pass the list of detectors to the analysis config builder constructor
|
||||
<3> The bucket span
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-job-data-description]]
|
||||
[id="{upid}-{api}-data-description"]
|
||||
==== Data Description
|
||||
|
||||
After defining the analysis config, the next thing to define is the
|
||||
|
@ -103,59 +108,22 @@ configuration would be:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-data-description]
|
||||
include-tagged::{doc-tests-file}[{api}-data-description]
|
||||
--------------------------------------------------
|
||||
<1> The time field
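Putting the detector, analysis configuration and data description together, a minimal job-creation sketch (assuming a configured `RestHighLevelClient` named `client`; the job ID and field names are illustrative, and this is not one of the tested snippets):

["source","java"]
--------------------------------------------------
// Sketch only: a single-detector job analysing the sum of a "total" field in 10 minute buckets.
Detector.Builder detector = new Detector.Builder()
    .setFunction("sum")        // the function to use
    .setFieldName("total");    // the field to apply it to
AnalysisConfig.Builder analysisConfig =
    new AnalysisConfig.Builder(Collections.singletonList(detector.build()))
        .setBucketSpan(TimeValue.timeValueMinutes(10));
DataDescription.Builder dataDescription = new DataDescription.Builder()
    .setTimeField("timestamp");
Job.Builder job = new Job.Builder("total-requests")
    .setAnalysisConfig(analysisConfig)
    .setDataDescription(dataDescription)
    .setDescription("Sum of total requests");
PutJobRequest request = new PutJobRequest(job.build());
PutJobResponse response = client.machineLearning().putJob(request, RequestOptions.DEFAULT);
--------------------------------------------------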
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-job-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The Put Job API can be executed through a `MachineLearningClient`
|
||||
instance. Such an instance can be retrieved from a `RestHighLevelClient`
|
||||
using the `machineLearning()` method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-job-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned `PutJobResponse` returns the full representation of
|
||||
The returned +{response}+ contains the full representation of
|
||||
the new {ml} job if it has been successfully created. This will
|
||||
contain the creation time and other fields initialized using
|
||||
default values:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The creation time is a field that was not passed in the `Job` object in the request
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-job-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `PutJobRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `PutJobResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of failure. The raised exception is provided as an argument
|
||||
|
|
|
@ -1,19 +1,24 @@
|
|||
[[java-rest-high-x-pack-ml-start-datafeed]]
|
||||
--
|
||||
:api: start-datafeed
|
||||
:request: StartDatafeedRequest
|
||||
:response: StartDatafeedResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Start Datafeed API
|
||||
|
||||
The Start Datafeed API provides the ability to start a {ml} datafeed in the cluster.
|
||||
It accepts a `StartDatafeedRequest` object and responds
|
||||
with a `StartDatafeedResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-start-datafeed-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Start Datafeed Request
|
||||
|
||||
A `StartDatafeedRequest` object is created referencing a non-null `datafeedId`.
|
||||
A +{request}+ object is created referencing a non-null `datafeedId`.
|
||||
All other fields are optional for the request.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing an existing `datafeedId`
|
||||
|
||||
|
@ -23,7 +28,7 @@ The following arguments are optional.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-request-options]
|
||||
include-tagged::{doc-tests-file}[{api}-request-options]
|
||||
--------------------------------------------------
|
||||
<1> Set when the datafeed should end; the value is exclusive.
|
||||
It may be specified as epoch seconds, epoch milliseconds, or an ISO 8601 string.
|
||||
|
@ -35,37 +40,4 @@ If you do not specify a start time and the datafeed is associated with a new job
|
|||
the analysis starts from the earliest time for which data is available.
|
||||
<3> Set the timeout for the request
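A short sketch of starting a datafeed with some of these options (assuming a configured `RestHighLevelClient` named `client`; the datafeed ID and timestamps are illustrative, and this is not one of the tested snippets):

["source","java"]
--------------------------------------------------
// Sketch only: start a datafeed from a fixed point in time with a request timeout.
StartDatafeedRequest request = new StartDatafeedRequest("datafeed-total-requests");
request.setStart("2018-08-20T00:00:00Z");           // analyse data from this time onwards
request.setTimeout(TimeValue.timeValueSeconds(30)); // how long to wait for the datafeed to start
StartDatafeedResponse response =
    client.machineLearning().startDatafeed(request, RequestOptions.DEFAULT);
boolean started = response.isStarted();
--------------------------------------------------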
|
||||
|
||||
[[java-rest-high-x-pack-ml-start-datafeed-execution]]
|
||||
==== Execution
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-execute]
|
||||
--------------------------------------------------
|
||||
<1> Did the datafeed successfully start?
|
||||
|
||||
[[java-rest-high-x-pack-ml-start-datafeed-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `StartDatafeedRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `StartDatafeedResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
include::../execution.asciidoc[]
|
||||
|
|
|
@ -1,20 +1,25 @@
|
|||
[[java-rest-high-x-pack-ml-stop-datafeed]]
|
||||
--
|
||||
:api: stop-datafeed
|
||||
:request: StopDatafeedRequest
|
||||
:response: StopDatafeedResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Stop Datafeed API
|
||||
|
||||
The Stop Datafeed API provides the ability to stop a {ml} datafeed in the cluster.
|
||||
It accepts a `StopDatafeedRequest` object and responds
|
||||
with a `StopDatafeedResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-stop-datafeed-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Stop Datafeed Request
|
||||
|
||||
A `StopDatafeedRequest` object is created referencing any number of non-null `datafeedId` entries.
|
||||
A +{request}+ object is created referencing any number of non-null `datafeedId` entries.
|
||||
Wildcards and `_all` are also accepted.
|
||||
All other fields are optional for the request.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing existing `datafeedId` entries.
|
||||
|
||||
|
@ -24,43 +29,10 @@ The following arguments are optional.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-request-options]
|
||||
include-tagged::{doc-tests-file}[{api}-request-options]
|
||||
--------------------------------------------------
|
||||
<1> Whether to ignore a wildcard expression that matches no datafeeds. (This includes the `_all` string.)
|
||||
<2> If true, the datafeed is stopped forcefully.
|
||||
<3> Controls the amount of time to wait until a datafeed stops. The default value is 20 seconds.
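A short sketch of stopping datafeeds with these options (assuming a configured `RestHighLevelClient` named `client`; the datafeed IDs are illustrative, and this is not one of the tested snippets):

["source","java"]
--------------------------------------------------
// Sketch only: stop two datafeeds, tolerating wildcard expressions that match nothing.
StopDatafeedRequest request =
    new StopDatafeedRequest("datafeed-total-requests", "datafeed-other-*");
request.setAllowNoDatafeeds(true);                  // don't fail if a wildcard matches nothing
request.setTimeout(TimeValue.timeValueSeconds(30)); // how long to wait for the datafeeds to stop
StopDatafeedResponse response =
    client.machineLearning().stopDatafeed(request, RequestOptions.DEFAULT);
boolean stopped = response.isStopped();
--------------------------------------------------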
|
||||
|
||||
[[java-rest-high-x-pack-ml-stop-datafeed-execution]]
|
||||
==== Execution
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-execute]
|
||||
--------------------------------------------------
|
||||
<1> Did the datafeed successfully stop?
|
||||
|
||||
[[java-rest-high-x-pack-ml-stop-datafeed-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `StopDatafeedRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `StopDatafeedResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
include::../execution.asciidoc[]
|
|
@ -1,18 +1,23 @@
|
|||
[[java-rest-high-x-pack-ml-update-job]]
|
||||
--
|
||||
:api: update-job
|
||||
:request: UpdateJobRequest
|
||||
:response: PutJobResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Update Job API
|
||||
|
||||
The Update Job API provides the ability to update a {ml} job.
|
||||
It accepts a `UpdateJobRequest` object and responds
|
||||
with a `PutJobResponse` object.
|
||||
It accepts a +{request}+ object and responds
|
||||
with a +{response}+ object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-update-job-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Update Job Request
|
||||
|
||||
An `UpdateJobRequest` object gets created with a `JobUpdate` object.
|
||||
An +{request}+ object is created with a `JobUpdate` object.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing a `JobUpdate` object
|
||||
|
||||
|
@ -23,7 +28,7 @@ job. An existing, non-null `jobId` must be referenced in its creation.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-options]
|
||||
include-tagged::{doc-tests-file}[{api}-options]
|
||||
--------------------------------------------------
|
||||
<1> Mandatory, non-null `jobId` referencing an existing {ml} job
|
||||
<2> Updated description
|
||||
|
@ -41,53 +46,21 @@ include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-op
|
|||
Included with these options are specific optional `JobUpdate.DetectorUpdate` updates.
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-detector-options]
|
||||
include-tagged::{doc-tests-file}[{api}-detector-options]
|
||||
--------------------------------------------------
|
||||
<1> The index of the detector. `0` means unknown
|
||||
<2> The optional description of the detector
|
||||
<3> The `DetectionRule` rules that apply to this detector
|
||||
|
||||
[[java-rest-high-x-pack-ml-update-job-execution]]
|
||||
==== Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-update-job-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `UpdateJobRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `PutJobResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-update-job-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Update Job Response
|
||||
|
||||
A `PutJobResponse` contains the updated `Job` object
|
||||
A +{response}+ contains the updated `Job` object
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> `getResponse()` returns the updated `Job` object
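A compact sketch of the whole update flow (assuming a configured `RestHighLevelClient` named `client`; the job ID and description are illustrative, and this is not one of the tested snippets):

["source","java"]
--------------------------------------------------
// Sketch only: update just the human-readable description of an existing job.
JobUpdate update = new JobUpdate.Builder("total-requests")
    .setDescription("Sum of total requests, updated")
    .build();
UpdateJobRequest request = new UpdateJobRequest(update);
PutJobResponse response = client.machineLearning().updateJob(request, RequestOptions.DEFAULT);
Job updatedJob = response.getResponse(); // the job as stored after the update
--------------------------------------------------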
|
||||
|
|
|
@ -11,6 +11,7 @@ The Java High Level REST Client supports the following Document APIs:
|
|||
Single document APIs::
|
||||
* <<{upid}-index>>
|
||||
* <<{upid}-get>>
|
||||
* <<{upid}-exists>>
|
||||
* <<{upid}-delete>>
|
||||
* <<{upid}-update>>
|
||||
|
||||
|
@ -219,33 +220,37 @@ include::licensing/get-license.asciidoc[]
|
|||
include::licensing/delete-license.asciidoc[]
|
||||
|
||||
== Machine Learning APIs
|
||||
:upid: {mainid}-x-pack-ml
|
||||
:doc-tests-file: {doc-tests}/MlClientDocumentationIT.java
|
||||
|
||||
The Java High Level REST Client supports the following Machine Learning APIs:
|
||||
|
||||
* <<java-rest-high-x-pack-ml-put-job>>
|
||||
* <<java-rest-high-x-pack-ml-get-job>>
|
||||
* <<java-rest-high-x-pack-ml-delete-job>>
|
||||
* <<java-rest-high-x-pack-ml-open-job>>
|
||||
* <<java-rest-high-x-pack-ml-close-job>>
|
||||
* <<java-rest-high-x-pack-ml-flush-job>>
|
||||
* <<java-rest-high-x-pack-ml-update-job>>
|
||||
* <<java-rest-high-x-pack-ml-get-job-stats>>
|
||||
* <<java-rest-high-x-pack-ml-put-datafeed>>
|
||||
* <<java-rest-high-x-pack-ml-get-datafeed>>
|
||||
* <<java-rest-high-x-pack-ml-delete-datafeed>>
|
||||
* <<java-rest-high-x-pack-ml-start-datafeed>>
|
||||
* <<java-rest-high-x-pack-ml-stop-datafeed>>
|
||||
* <<java-rest-high-x-pack-ml-forecast-job>>
|
||||
* <<java-rest-high-x-pack-ml-delete-forecast>>
|
||||
* <<java-rest-high-x-pack-ml-get-buckets>>
|
||||
* <<java-rest-high-x-pack-ml-get-overall-buckets>>
|
||||
* <<java-rest-high-x-pack-ml-get-records>>
|
||||
* <<java-rest-high-x-pack-ml-post-data>>
|
||||
* <<java-rest-high-x-pack-ml-get-influencers>>
|
||||
* <<java-rest-high-x-pack-ml-get-categories>>
|
||||
* <<java-rest-high-x-pack-ml-get-calendars>>
|
||||
* <<java-rest-high-x-pack-ml-put-calendar>>
|
||||
* <<java-rest-high-x-pack-ml-delete-calendar>>
|
||||
* <<{upid}-put-job>>
|
||||
* <<{upid}-get-job>>
|
||||
* <<{upid}-delete-job>>
|
||||
* <<{upid}-open-job>>
|
||||
* <<{upid}-close-job>>
|
||||
* <<{upid}-flush-job>>
|
||||
* <<{upid}-update-job>>
|
||||
* <<{upid}-get-job-stats>>
|
||||
* <<{upid}-put-datafeed>>
|
||||
* <<{upid}-get-datafeed>>
|
||||
* <<{upid}-delete-datafeed>>
|
||||
* <<{upid}-preview-datafeed>>
|
||||
* <<{upid}-start-datafeed>>
|
||||
* <<{upid}-stop-datafeed>>
|
||||
* <<{upid}-get-datafeed-stats>>
|
||||
* <<{upid}-forecast-job>>
|
||||
* <<{upid}-delete-forecast>>
|
||||
* <<{upid}-get-buckets>>
|
||||
* <<{upid}-get-overall-buckets>>
|
||||
* <<{upid}-get-records>>
|
||||
* <<{upid}-post-data>>
|
||||
* <<{upid}-get-influencers>>
|
||||
* <<{upid}-get-categories>>
|
||||
* <<{upid}-get-calendars>>
|
||||
* <<{upid}-put-calendar>>
|
||||
* <<{upid}-delete-calendar>>
|
||||
|
||||
include::ml/put-job.asciidoc[]
|
||||
include::ml/get-job.asciidoc[]
|
||||
|
@ -257,8 +262,10 @@ include::ml/flush-job.asciidoc[]
|
|||
include::ml/put-datafeed.asciidoc[]
|
||||
include::ml/get-datafeed.asciidoc[]
|
||||
include::ml/delete-datafeed.asciidoc[]
|
||||
include::ml/preview-datafeed.asciidoc[]
|
||||
include::ml/start-datafeed.asciidoc[]
|
||||
include::ml/stop-datafeed.asciidoc[]
|
||||
include::ml/get-datafeed-stats.asciidoc[]
|
||||
include::ml/get-job-stats.asciidoc[]
|
||||
include::ml/forecast-job.asciidoc[]
|
||||
include::ml/delete-forecast.asciidoc[]
|
||||
|
@ -306,15 +313,20 @@ include::security/change-password.asciidoc[]
|
|||
|
||||
== Watcher APIs
|
||||
|
||||
:upid: {mainid}-watcher
|
||||
:doc-tests-file: {doc-tests}/WatcherDocumentationIT.java
|
||||
|
||||
The Java High Level REST Client supports the following Watcher APIs:
|
||||
|
||||
* <<java-rest-high-x-pack-watcher-put-watch>>
|
||||
* <<java-rest-high-x-pack-watcher-delete-watch>>
|
||||
* <<java-rest-high-watcher-ack-watch>>
|
||||
* <<{upid}-ack-watch>>
|
||||
* <<{upid}-activate-watch>>
|
||||
|
||||
include::watcher/put-watch.asciidoc[]
|
||||
include::watcher/delete-watch.asciidoc[]
|
||||
include::watcher/ack-watch.asciidoc[]
|
||||
include::watcher/activate-watch.asciidoc[]
|
||||
|
||||
== Graph APIs
|
||||
|
||||
|
|
|
@ -1,7 +1,13 @@
|
|||
[[java-rest-high-watcher-ack-watch]]
|
||||
--
|
||||
:api: ack-watch
|
||||
:request: AckWatchRequest
|
||||
:response: AckWatchResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Ack Watch API
|
||||
|
||||
[[java-rest-high-watcher-ack-watch-execution]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Execution
|
||||
|
||||
{xpack-ref}/actions.html#actions-ack-throttle[Acknowledging a watch] enables you
|
||||
|
@ -10,48 +16,23 @@ through the following request:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> The ID of the watch to ack.
|
||||
<2> An optional list of IDs representing the watch actions that should be acked.
|
||||
If no action IDs are provided, then all of the watch's actions will be acked.
|
||||
|
||||
[[java-rest-high-watcher-ack-watch-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned `AckWatchResponse` contains the new status of the requested watch:
|
||||
The returned +{response}+ contains the new status of the requested watch:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The status of a specific action that was acked.
|
||||
<2> The acknowledgement state of the action. If the action was successfully
|
||||
acked, this state will be equal to `AckStatus.State.ACKED`.
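A short sketch of acking a watch action and reading back its state (assuming a configured `RestHighLevelClient` named `client`; the watch and action IDs are illustrative, and this is not one of the tested snippets):

["source","java"]
--------------------------------------------------
// Sketch only: ack a single action of a watch and check its acknowledgement state.
AckWatchRequest request = new AckWatchRequest("my_watch_id", "logme");
AckWatchResponse response = client.watcher().ackWatch(request, RequestOptions.DEFAULT);
ActionStatus actionStatus = response.getStatus().actionStatus("logme");
AckStatus.State state = actionStatus.ackStatus().state(); // ACKED if the ack succeeded
--------------------------------------------------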
|
||||
|
||||
[[java-rest-high-watcher-ack-watch-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `AckWatchRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes.
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once the request
|
||||
completes, the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A listener for `AckWatchResponse` can be constructed as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument.
|
||||
<2> Called in case of failure. The raised exception is provided as an argument.
|
||||
include::../execution.asciidoc[]
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
--
|
||||
:api: activate-watch
|
||||
:request: ActivateWatchRequest
|
||||
:response: ActivateWatchResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Activate Watch API
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Execution
|
||||
|
||||
A watch can be activated as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned +{response}+ contains the new status of the activated watch.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> `watchStatus` contains the status of the watch
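A short sketch of activating a watch and checking the returned status (assuming a configured `RestHighLevelClient` named `client`; the watch ID is illustrative, and this is not one of the tested snippets):

["source","java"]
--------------------------------------------------
// Sketch only: activate a watch and confirm that it is now active.
ActivateWatchRequest request = new ActivateWatchRequest("my_watch_id");
ActivateWatchResponse response =
    client.watcher().activateWatch(request, RequestOptions.DEFAULT);
boolean active = response.getStatus().state().isActive();
--------------------------------------------------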
|
||||
|
||||
[id="{upid}-{api}-request-async"]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request-async]
|
||||
--------------------------------------------------
|
||||
<1> The +{request}+ to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for +{response}+ looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of failure. The raised exception is provided as an argument
|
|
@ -307,7 +307,7 @@ You can also customize the response consumer used to buffer the asynchronous
|
|||
responses. The default consumer will buffer up to 100MB of response on the
|
||||
JVM heap. If the response is larger then the request will fail. You could,
|
||||
for example, lower the maximum size which might be useful if you are running
|
||||
in a heap constrained environment like the exmaple above.
|
||||
in a heap constrained environment like the example above.
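One way to lower the limit is to build the options singleton with a smaller heap-buffered consumer, roughly as in this sketch (the 10MB figure and variable names are illustrative, not taken from the tested snippets):

["source","java"]
--------------------------------------------------
// Sketch only: lower the async response buffer from the default 100MB to 10MB.
RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
optionsBuilder.setHttpAsyncResponseConsumerFactory(
    new HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory(10 * 1024 * 1024));
RequestOptions commonOptions = optionsBuilder.build();
--------------------------------------------------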
|
||||
|
||||
Once you've created the singleton you can use it when making requests:
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
|
||||
Painless has a strict whitelist for methods and classes to ensure all
|
||||
painless scripts are secure. Most of these methods are exposed directly
|
||||
from the Java Runtime Enviroment (JRE) while others are part of
|
||||
from the Java Runtime Environment (JRE) while others are part of
|
||||
Elasticsearch or Painless itself. Below is a list of all available
|
||||
classes grouped with their respective methods. Clicking on the method
|
||||
name takes you to the documentation for that specific method. Methods
|
||||
|
|
|
@ -32,7 +32,7 @@ PUT _snapshot/my_hdfs_repository
|
|||
"type": "hdfs",
|
||||
"settings": {
|
||||
"uri": "hdfs://namenode:8020/",
|
||||
"path": "elasticsearch/respositories/my_hdfs_repository",
|
||||
"path": "elasticsearch/repositories/my_hdfs_repository",
|
||||
"conf.dfs.client.read.shortcircuit": "true"
|
||||
}
|
||||
}
|
||||
|
@ -149,7 +149,7 @@ PUT _snapshot/my_hdfs_repository
|
|||
"type": "hdfs",
|
||||
"settings": {
|
||||
"uri": "hdfs://namenode:8020/",
|
||||
"path": "/user/elasticsearch/respositories/my_hdfs_repository",
|
||||
"path": "/user/elasticsearch/repositories/my_hdfs_repository",
|
||||
"security.principal": "elasticsearch@REALM"
|
||||
}
|
||||
}
|
||||
|
@ -167,7 +167,7 @@ PUT _snapshot/my_hdfs_repository
|
|||
"type": "hdfs",
|
||||
"settings": {
|
||||
"uri": "hdfs://namenode:8020/",
|
||||
"path": "/user/elasticsearch/respositories/my_hdfs_repository",
|
||||
"path": "/user/elasticsearch/repositories/my_hdfs_repository",
|
||||
"security.principal": "elasticsearch/_HOST@REALM"
|
||||
}
|
||||
}
|
||||
|
@ -186,4 +186,4 @@ extracts for file access checks will be `elasticsearch`.
|
|||
|
||||
NOTE: The repository plugin makes no assumptions about what Elasticsearch's principal name is. The main fragment of the
|
||||
Kerberos principal is not required to be `elasticsearch`. If you have a principal or service name that works better
|
||||
for you or your organization then feel free to use it instead!
|
||||
for you or your organization then feel free to use it instead!
|
||||
|
|
|
@ -1,8 +1,6 @@
|
|||
[[search-aggregations-bucket-composite-aggregation]]
|
||||
=== Composite Aggregation
|
||||
|
||||
beta[]
|
||||
|
||||
A multi-bucket aggregation that creates composite buckets from different sources.
|
||||
|
||||
Unlike the other `multi-bucket` aggregation the `composite` aggregation can be used
|
||||
|
|
|
@ -89,7 +89,7 @@ Response:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
// TESTRESPONSE[skip:historically skipped]
|
||||
|
||||
The results show that "h5n1" is one of several terms strongly associated with bird flu.
|
||||
It only occurs 5 times in our index as a whole (see the `bg_count`) and yet 4 of these
|
||||
|
|