Merge branch 'master' into zen2

David Turner 2018-10-16 14:41:14 +01:00
commit 950ca3adda
662 changed files with 12494 additions and 5559 deletions

View File

@ -161,7 +161,7 @@ Please follow these formatting guidelines:
* Line width is 140 characters
* The rest is left to Java coding standards
* Disable “auto-format on save” to prevent unnecessary format changes, which make reviews much harder. If your IDE supports formatting only modified chunks, that is fine to do.
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. Please attempt to tame your IDE so it doesn't make them and please send a PR against this document with instructions for your IDE if it doesn't contain them.
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. This can be done automatically by your IDE:
* Eclipse: `Preferences->Java->Code Style->Organize Imports`. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value.
* IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`. There are two configuration options: `Class count to use import with '*'` and `Names count to use static import with '*'`. Set their values to 99999 or some other absurdly high value.
* Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so.
@ -320,7 +320,7 @@ have to test Elasticsearch.
#### Configurations
Gradle organizes dependencies and build artifacts into "configurations" and
allows you to use these configurations arbitrarilly. Here are some of the most
allows you to use these configurations arbitrarily. Here are some of the most
common configurations in our build and how we use them:
<dl>

View File

@ -1,4 +1,4 @@
[[Testing Framework Cheatsheet]]
[[TestingFrameworkCheatsheet]]
= Testing
[partintro]
@ -250,7 +250,7 @@ Pass arbitrary jvm arguments.
Running backwards compatibility tests is disabled by default since it
requires a release version of elasticsearch to be present on the test system.
To run backwards compatibilty tests untar or unzip a release and run the tests
To run backwards compatibility tests untar or unzip a release and run the tests
with the following command:
---------------------------------------------------------------------------

View File

@ -44,6 +44,8 @@ compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-u
// needs to be added separately otherwise Gradle will quote it and javac will fail
compileJava.options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"])
run.executable = new File(project.runtimeJavaHome, 'bin/java')
// classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes
forbiddenApisMain.enabled = false

View File

@ -0,0 +1,94 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.benchmark.fs;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(3)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
public class AvailableIndexFoldersBenchmark {
private NodeEnvironment.NodePath nodePath;
private NodeEnvironment nodeEnv;
private Set<String> excludedDirs;
@Setup
public void setup() throws IOException {
Path path = Files.createTempDirectory("test");
String[] paths = new String[] {path.toString()};
nodePath = new NodeEnvironment.NodePath(path);
LogConfigurator.setNodeName("test");
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), path)
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
nodeEnv = new NodeEnvironment(settings, new Environment(settings, null));
Files.createDirectories(nodePath.indicesPath);
excludedDirs = new HashSet<>();
int numIndices = 5000;
for (int i = 0; i < numIndices; i++) {
String dirName = "dir" + i;
Files.createDirectory(nodePath.indicesPath.resolve(dirName));
excludedDirs.add(dirName);
}
if (nodeEnv.availableIndexFoldersForPath(nodePath).size() != numIndices) {
throw new IllegalStateException("bad size");
}
if (nodeEnv.availableIndexFoldersForPath(nodePath, excludedDirs::contains).size() != 0) {
throw new IllegalStateException("bad size");
}
}
@Benchmark
public Set<String> availableIndexFolderNaive() throws IOException {
return nodeEnv.availableIndexFoldersForPath(nodePath);
}
@Benchmark
public Set<String> availableIndexFolderOptimized() throws IOException {
return nodeEnv.availableIndexFoldersForPath(nodePath, excludedDirs::contains);
}
}
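
A minimal sketch of invoking this benchmark straight through JMH's runner API, in case you want to run it outside the Gradle wiring (the runner class below is hypothetical; `Runner` and `OptionsBuilder` are standard JMH types):

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class AvailableIndexFoldersBenchmarkRunner {
    public static void main(String[] args) throws Exception {
        // fork count, warmup and measurement settings come from the annotations on the benchmark class
        Options opt = new OptionsBuilder()
            .include(AvailableIndexFoldersBenchmark.class.getSimpleName())
            .build();
        new Runner(opt).run();
    }
}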

View File

@ -264,7 +264,11 @@ class RandomizedTestingTask extends DefaultTask {
throw new InvalidUserDataException('Seed should be ' +
'set on the project instead of a system property')
}
sysproperty key: prop.getKey(), value: prop.getValue().toString()
if (prop.getValue() instanceof Closure) {
sysproperty key: prop.getKey(), value: (prop.getValue() as Closure).call().toString()
} else {
sysproperty key: prop.getKey(), value: prop.getValue().toString()
}
}
systemProperty 'tests.seed', project.testSeed
for (Map.Entry<String, Object> envvar : environmentVariables) {

View File

@ -122,7 +122,7 @@ class VersionCollection {
if (isReleased(version) == false) {
// caveat 1 - This should only ever contain 2 non released branches in flight. An example: 6.x is frozen,
// and 6.2 is cut but not yet released; there is some simple logic to make sure that in the case of more than 2,
// it will bail. The order is that the minor snapshot is fufilled first, and then the staged minor snapshot
// it will bail. The order is that the minor snapshot is fulfilled first, and then the staged minor snapshot
if (nextMinorSnapshot == null) {
// it has not been set yet
nextMinorSnapshot = replaceAsSnapshot(version)

View File

@ -72,7 +72,7 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
/**
* Root directory containing all the files generated by this task. It is
* contained withing testRoot.
* contained within testRoot.
*/
File outputRoot() {
return new File(testRoot, '/rest-api-spec/test')
@ -226,10 +226,10 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
} else {
current.println('---')
current.println("\"line_$test.start\":")
/* The Elasticsearch test runner doesn't support the warnings
* construct unless you output this skip. Since we don't know
* if this snippet will use the warnings construct we emit this
* warning every time. */
/* The Elasticsearch test runner doesn't support quite a few
* constructs unless we output this skip. We don't know if
* we're going to use these constructs, but we might so we
* output the skip just in case. */
current.println(" - skip:")
current.println(" features: ")
current.println(" - default_shards")
@ -250,13 +250,13 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
}
}
}
if (test.skipTest) {
if (test.skip) {
if (test.continued) {
throw new InvalidUserDataException("Continued snippets "
+ "can't be skipped")
}
current.println(" - always_skip")
current.println(" reason: $test.skipTest")
current.println(" reason: $test.skip")
}
if (test.setup != null) {
// Insert a setup defined outside of the docs
@ -274,9 +274,11 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
}
private void response(Snippet response) {
current.println(" - match: ")
current.println(" \$body: ")
response.contents.eachLine { current.println(" $it") }
if (null == response.skip) {
current.println(" - match: ")
current.println(" \$body: ")
response.contents.eachLine { current.println(" $it") }
}
}
void emitDo(String method, String pathAndQuery, String body,

View File

@ -122,7 +122,9 @@ public class SnippetsTask extends DefaultTask {
+ "contain `curl`.")
}
}
if (snippet.testResponse && snippet.language == 'js') {
if (snippet.testResponse
&& 'js' == snippet.language
&& null == snippet.skip) {
String quoted = snippet.contents
// quote values starting with $
.replaceAll(/([:,])\s*(\$[^ ,\n}]+)/, '$1 "$2"')
@ -216,7 +218,7 @@ public class SnippetsTask extends DefaultTask {
return
}
if (it.group(4) != null) {
snippet.skipTest = it.group(4)
snippet.skip = it.group(4)
return
}
if (it.group(5) != null) {
@ -249,7 +251,7 @@ public class SnippetsTask extends DefaultTask {
substitutions = []
}
String loc = "$file:$lineNumber"
parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$CAT) ?/) {
parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$CAT|$SKIP) ?/) {
if (it.group(1) != null) {
// TESTRESPONSE[s/adsf/jkl/]
substitutions.add([it.group(1), it.group(2)])
@ -259,6 +261,9 @@ public class SnippetsTask extends DefaultTask {
substitutions.add(['\n$', '\\\\s*/'])
substitutions.add(['( +)', '$1\\\\s+'])
substitutions.add(['\n', '\\\\s*\n '])
} else if (it.group(4) != null) {
// TESTRESPONSE[skip:reason]
snippet.skip = it.group(4)
}
}
}
@ -312,7 +317,7 @@ public class SnippetsTask extends DefaultTask {
boolean test = false
boolean testResponse = false
boolean testSetup = false
String skipTest = null
String skip = null
boolean continued = false
String language = null
String catchPart = null
@ -337,8 +342,8 @@ public class SnippetsTask extends DefaultTask {
if (catchPart) {
result += "[catch: $catchPart]"
}
if (skipTest) {
result += "[skip=$skipTest]"
if (skip) {
result += "[skip=$skip]"
}
if (continued) {
result += '[continued]'
@ -352,6 +357,9 @@ public class SnippetsTask extends DefaultTask {
}
if (testResponse) {
result += '// TESTRESPONSE'
if (skip) {
result += "[skip=$skip]"
}
}
if (testSetup) {
result += '// TESTSETUP'
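
For illustration, the new skip form recognized above lets a docs snippet opt out of response assertions entirely (the reason text here is hypothetical):

// TESTRESPONSE[skip:response is generated dynamically and differs between runs]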

View File

@ -337,7 +337,7 @@ class NodeInfo {
case 'deb':
return new File(baseDir, "${distro}-extracted/etc/elasticsearch")
default:
throw new InvalidUserDataException("Unkown distribution: ${distro}")
throw new InvalidUserDataException("Unknown distribution: ${distro}")
}
}
}

View File

@ -43,7 +43,7 @@ public class JdkJarHellCheck {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
String entry = root.relativize(file).toString().replace('\\', '/');
if (entry.endsWith(".class")) {
if (entry.endsWith(".class") && entry.endsWith("module-info.class") == false) {
if (ext.getResource(entry) != null) {
detected.add(
entry

View File

@ -20,12 +20,12 @@ package org.elasticsearch.gradle.precommit;
import org.apache.commons.io.output.NullOutputStream;
import org.elasticsearch.gradle.JdkJarHellCheck;
import org.elasticsearch.test.NamingConventionsCheck;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.JavaVersion;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileTree;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.InputFiles;
@ -47,6 +47,7 @@ import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class ThirdPartyAuditTask extends DefaultTask {
@ -171,19 +172,38 @@ public class ThirdPartyAuditTask extends DefaultTask {
File jarExpandDir = getJarExpandDir();
// We need to clean up to make sure old dependencies don't linger
getProject().delete(jarExpandDir);
jars.forEach(jar ->
jars.forEach(jar -> {
FileTree jarFiles = getProject().zipTree(jar);
getProject().copy(spec -> {
spec.from(jarFiles);
spec.into(jarExpandDir);
// exclude classes from multi release jars
spec.exclude("META-INF/versions/**");
});
// Deal with multi release jars:
// The order is important: we iterate here so we don't depend on the order in which Gradle executes the spec.
// We extract multi release jar classes (if these exist) going from 9 - the first version to support them - to the
// current `targetCompatibility` version.
// Each extract will overwrite the top level classes that existed before it; the result is that we end up
// with a single version of the class in `jarExpandDir`.
// This will be the closest version to `targetCompatibility`, the same class that would be loaded in a JVM
// that has `targetCompatibility` version.
// This means we only scan classes that would be loaded into `targetCompatibility`, and don't look at any
// other version specific implementation of said classes.
IntStream.rangeClosed(
Integer.parseInt(JavaVersion.VERSION_1_9.getMajorVersion()),
Integer.parseInt(targetCompatibility.getMajorVersion())
).forEach(majorVersion -> getProject().copy(spec -> {
spec.from(getProject().zipTree(jar));
spec.into(jarExpandDir);
// Exclude classes for multi release jars above target
for (int i = Integer.parseInt(targetCompatibility.getMajorVersion()) + 1;
i <= Integer.parseInt(JavaVersion.VERSION_HIGHER.getMajorVersion());
i++
) {
spec.exclude("META-INF/versions/" + i + "/**");
}
})
);
String metaInfPrefix = "META-INF/versions/" + majorVersion;
spec.include(metaInfPrefix + "/**");
// Drop the version specific prefix
spec.eachFile(details -> details.setPath(details.getPath().replace(metaInfPrefix, "")));
spec.setIncludeEmptyDirs(false);
}));
});
}
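
For context, a multi release jar overlays version specific classes on top of the base ones, so after the extraction loop above only the variant closest to `targetCompatibility` remains in `jarExpandDir`. An illustrative layout (class name hypothetical):

com/example/Foo.class                        base class, used on Java 8
META-INF/versions/9/com/example/Foo.class    replaces the base class on Java 9+
META-INF/versions/10/com/example/Foo.class   replaces it again on Java 10+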
private void assertNoJarHell(Set<String> jdkJarHellClasses) {
@ -276,9 +296,9 @@ public class ThirdPartyAuditTask extends DefaultTask {
private Set<String> runJdkJarHellCheck() throws IOException {
ByteArrayOutputStream standardOut = new ByteArrayOutputStream();
ExecResult execResult = getProject().javaexec(spec -> {
URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation();
URL location = JdkJarHellCheck.class.getProtectionDomain().getCodeSource().getLocation();
if (location.getProtocol().equals("file") == false) {
throw new GradleException("Unexpected location for NamingConventionCheck class: " + location);
throw new GradleException("Unexpected location for JdkJarHellCheck class: " + location);
}
try {
spec.classpath(

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.clusterformation;
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.Version;

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.clusterformation;
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.GradleServicesAdapter;
import org.elasticsearch.gradle.Distribution;

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.clusterformation;
package org.elasticsearch.gradle.testclusters;
import groovy.lang.Closure;
import org.elasticsearch.GradleServicesAdapter;
@ -37,12 +37,12 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ClusterformationPlugin implements Plugin<Project> {
public class TestClustersPlugin implements Plugin<Project> {
public static final String LIST_TASK_NAME = "listElasticSearchClusters";
public static final String EXTENSION_NAME = "elasticSearchClusters";
private final Logger logger = Logging.getLogger(ClusterformationPlugin.class);
private final Logger logger = Logging.getLogger(TestClustersPlugin.class);
@Override
public void apply(Project project) {

View File

@ -1 +0,0 @@
implementation-class=org.elasticsearch.gradle.clusterformation.ClusterformationPlugin

View File

@ -0,0 +1 @@
implementation-class=org.elasticsearch.gradle.testclusters.TestClustersPlugin

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.clusterformation;
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
@ -26,11 +26,11 @@ import org.gradle.testkit.runner.TaskOutcome;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
public class ClusterformationPluginIT extends GradleIntegrationTestCase {
public class TestClustersPluginIT extends GradleIntegrationTestCase {
public void testListClusters() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("clusterformation"))
.withProjectDir(getProjectDir("testclusters"))
.withArguments("listElasticSearchClusters", "-s")
.withPluginClasspath()
.build();
@ -45,7 +45,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
public void testUseClusterByOne() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("clusterformation"))
.withProjectDir(getProjectDir("testclusters"))
.withArguments("user1", "-s")
.withPluginClasspath()
.build();
@ -60,7 +60,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
public void testUseClusterByOneWithDryRun() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("clusterformation"))
.withProjectDir(getProjectDir("testclusters"))
.withArguments("user1", "-s", "--dry-run")
.withPluginClasspath()
.build();
@ -75,7 +75,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
public void testUseClusterByTwo() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("clusterformation"))
.withProjectDir(getProjectDir("testclusters"))
.withArguments("user1", "user2", "-s")
.withPluginClasspath()
.build();
@ -92,7 +92,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
public void testUseClusterByUpToDateTask() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("clusterformation"))
.withProjectDir(getProjectDir("testclusters"))
.withArguments("upToDate1", "upToDate2", "-s")
.withPluginClasspath()
.build();
@ -109,7 +109,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
public void testUseClusterBySkippedTask() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("clusterformation"))
.withProjectDir(getProjectDir("testclusters"))
.withArguments("skipped1", "skipped2", "-s")
.withPluginClasspath()
.build();
@ -126,7 +126,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase {
public void tetUseClusterBySkippedAndWorkingTask() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("clusterformation"))
.withProjectDir(getProjectDir("testclusters"))
.withArguments("skipped1", "user1", "-s")
.withPluginClasspath()
.build();

View File

@ -22,7 +22,7 @@ task sample {
// dependsOn buildResources.outputDir
// for now it's just
dependsOn buildResources
// we have to refference it at configuration time in order to be picked up
// we have to reference it at configuration time in order to be picked up
ext.checkstyle_suppressions = buildResources.copy('checkstyle_suppressions.xml')
doLast {
println "This task is using ${file(checkstyle_suppressions)}"
@ -35,4 +35,4 @@ task noConfigAfterExecution {
println "This should cause an error because we are refferencing " +
"${buildResources.copy('checkstyle_suppressions.xml')} after the `buildResources` task has ran."
}
}
}

View File

@ -1,5 +1,5 @@
plugins {
id 'elasticsearch.clusterformation'
id 'elasticsearch.testclusters'
}
elasticSearchClusters {

View File

@ -15,6 +15,8 @@ slf4j = 1.6.2
# when updating the JNA version, also update the version in buildSrc/build.gradle
jna = 4.5.1
netty = 4.1.30.Final
# test dependencies
randomizedrunner = 2.7.0
junit = 4.12

View File

@ -76,6 +76,8 @@ forbiddenApisMain {
addSignatureFiles 'http-signatures'
signaturesFiles += files('src/main/resources/forbidden/rest-high-level-signatures.txt')
}
File nodeCert = file("./testnode.crt")
File nodeTrustStore = file("./testnode.jks")
integTestRunner {
systemProperty 'tests.rest.cluster.username', System.getProperty('tests.rest.cluster.username', 'test_user')
@ -85,11 +87,17 @@ integTestRunner {
integTestCluster {
setting 'xpack.license.self_generated.type', 'trial'
setting 'xpack.security.enabled', 'true'
// Truststore settings are not used since TLS is not enabled. Included for testing the get certificates API
setting 'xpack.ssl.certificate_authorities', 'testnode.crt'
setting 'xpack.security.transport.ssl.truststore.path', 'testnode.jks'
setting 'xpack.security.transport.ssl.truststore.password', 'testnode'
setupCommand 'setupDummyUser',
'bin/elasticsearch-users',
'useradd', System.getProperty('tests.rest.cluster.username', 'test_user'),
'-p', System.getProperty('tests.rest.cluster.password', 'test-password'),
'-r', 'superuser'
extraConfigFile nodeCert.name, nodeCert
extraConfigFile nodeTrustStore.name, nodeTrustStore
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",

View File

@ -29,6 +29,7 @@ import org.elasticsearch.action.ingest.SimulatePipelineResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import java.io.IOException;
import java.util.Collections;
import static java.util.Collections.emptySet;
@ -83,7 +84,7 @@ public final class IngestClient {
*/
public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::getPipeline, options,
GetPipelineResponse::fromXContent, emptySet());
GetPipelineResponse::fromXContent, Collections.singleton(404));
}
/**
@ -96,7 +97,7 @@ public final class IngestClient {
*/
public void getPipelineAsync(GetPipelineRequest request, RequestOptions options, ActionListener<GetPipelineResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::getPipeline, options,
GetPipelineResponse::fromXContent, listener, emptySet());
GetPipelineResponse::fromXContent, listener, Collections.singleton(404));
}
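
With 404 in the ignored set, a lookup for a missing pipeline now parses into a regular response instead of surfacing as an exception. A rough usage sketch, assuming `client` is a `RestHighLevelClient` and the pipeline id is made up:

GetPipelineRequest request = new GetPipelineRequest("no-such-pipeline");
GetPipelineResponse response = client.ingest().getPipeline(request, RequestOptions.DEFAULT);
// inspect the (possibly empty) response rather than catching an exception for the 404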
/**

View File

@ -22,6 +22,8 @@ package org.elasticsearch.client;
import org.apache.http.HttpEntity;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.license.StartBasicRequest;
import org.elasticsearch.client.license.StartBasicResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.xcontent.DeprecationHandler;
@ -121,6 +123,28 @@ public final class LicenseClient {
AcknowledgedResponse::fromXContent, listener, emptySet());
}
/**
* Initiates an indefinite basic license.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public StartBasicResponse startBasic(StartBasicRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, LicenseRequestConverters::startBasic, options,
StartBasicResponse::fromXContent, emptySet());
}
/**
* Asynchronously initiates an indefinite basic license.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void startBasicAsync(StartBasicRequest request, RequestOptions options,
ActionListener<StartBasicResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, LicenseRequestConverters::startBasic, options,
StartBasicResponse::fromXContent, listener, emptySet());
}
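
A short usage sketch, assuming `client` is a `RestHighLevelClient`; the `StartBasicRequest(boolean)` constructor and the response accessors appear later in this commit:

StartBasicRequest request = new StartBasicRequest(true); // acknowledge the downgrade up front
StartBasicResponse response = client.license().startBasic(request, RequestOptions.DEFAULT);
if (response.isBasicStarted() == false) {
    // consult response.getErrorMessage() and response.getAcknowledgeMessages()
}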
/**
* Converts an entire response into a json string
*

View File

@ -21,7 +21,9 @@ package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.license.StartBasicRequest;
import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
@ -61,4 +63,18 @@ public class LicenseRequestConverters {
parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout());
return request;
}
static Request startBasic(StartBasicRequest startBasicRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_xpack", "license", "start_basic")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request);
parameters.withTimeout(startBasicRequest.timeout());
parameters.withMasterTimeout(startBasicRequest.masterNodeTimeout());
if (startBasicRequest.isAcknowledge()) {
parameters.putParam("acknowledge", "true");
}
return request;
}
}

View File

@ -38,6 +38,7 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobStatsRequest;
@ -45,6 +46,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutJobRequest;
@ -146,7 +148,12 @@ final class MLRequestConverters {
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
params.putParam("force", Boolean.toString(deleteJobRequest.isForce()));
if (deleteJobRequest.getForce() != null) {
params.putParam("force", Boolean.toString(deleteJobRequest.getForce()));
}
if (deleteJobRequest.getWaitForCompletion() != null) {
params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion()));
}
return request;
}
@ -259,6 +266,34 @@ final class MLRequestConverters {
return request;
}
static Request getDatafeedStats(GetDatafeedStatsRequest getDatafeedStatsRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("datafeeds")
.addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedStatsRequest.getDatafeedIds()))
.addPathPartAsIs("_stats")
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
if (getDatafeedStatsRequest.isAllowNoDatafeeds() != null) {
params.putParam("allow_no_datafeeds", Boolean.toString(getDatafeedStatsRequest.isAllowNoDatafeeds()));
}
return request;
}
static Request previewDatafeed(PreviewDatafeedRequest previewDatafeedRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("datafeeds")
.addPathPart(previewDatafeedRequest.getDatafeedId())
.addPathPartAsIs("_preview")
.build();
return new Request(HttpGet.METHOD_NAME, endpoint);
}
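
Put together, these converters build requests of roughly this shape (datafeed ids hypothetical):

GET _xpack/ml/datafeeds/feed-1,feed-2/_stats?allow_no_datafeeds=true
GET _xpack/ml/datafeeds/feed-1/_preview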
static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")

View File

@ -26,6 +26,7 @@ import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
@ -38,6 +39,8 @@ import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetCategoriesResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsResponse;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetInfluencersResponse;
import org.elasticsearch.client.ml.GetJobRequest;
@ -52,6 +55,8 @@ import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
@ -181,7 +186,7 @@ public final class MachineLearningClient {
}
/**
* Gets one or more Machine Learning job configuration info, asynchronously.
* Gets usage statistics for one or more Machine Learning jobs, asynchronously.
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get job stats docs</a>
@ -207,14 +212,15 @@ public final class MachineLearningClient {
*
* @param request The request to delete the job
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return action acknowledgement
* @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for
* completion
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException {
public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::deleteJob,
options,
AcknowledgedResponse::fromXContent,
DeleteJobResponse::fromXContent,
Collections.emptySet());
}
@ -228,11 +234,11 @@ public final class MachineLearningClient {
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<DeleteJobResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::deleteJob,
options,
AcknowledgedResponse::fromXContent,
DeleteJobResponse::fromXContent,
listener,
Collections.emptySet());
}
@ -649,6 +655,90 @@ public final class MachineLearningClient {
Collections.emptySet());
}
/**
* Gets statistics for one or more Machine Learning datafeeds
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html">Get datafeed stats docs</a>
*
* @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return {@link GetDatafeedStatsResponse} response object containing
* the {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} objects and the number of datafeeds found
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetDatafeedStatsResponse getDatafeedStats(GetDatafeedStatsRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::getDatafeedStats,
options,
GetDatafeedStatsResponse::fromXContent,
Collections.emptySet());
}
/**
* Previews the given Machine Learning Datafeed
* <p>
* For additional info
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html">
* ML Preview Datafeed documentation</a>
*
* @param request The request to preview the datafeed
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return {@link PreviewDatafeedResponse} object containing a {@link org.elasticsearch.common.bytes.BytesReference} of the data in
* JSON format
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public PreviewDatafeedResponse previewDatafeed(PreviewDatafeedRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::previewDatafeed,
options,
PreviewDatafeedResponse::fromXContent,
Collections.emptySet());
}
/**
* Gets statistics for one or more Machine Learning datafeeds, asynchronously.
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html">Get datafeed stats docs</a>
*
* @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified with {@link GetDatafeedStatsResponse} upon request completion
*/
public void getDatafeedStatsAsync(GetDatafeedStatsRequest request,
RequestOptions options,
ActionListener<GetDatafeedStatsResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::getDatafeedStats,
options,
GetDatafeedStatsResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Previews the given Machine Learning Datafeed asynchronously and notifies the listener on completion
* <p>
* For additional info
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html">
* ML Preview Datafeed documentation</a>
*
* @param request The request to preview the datafeed
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void previewDatafeedAsync(PreviewDatafeedRequest request,
RequestOptions options,
ActionListener<PreviewDatafeedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::previewDatafeed,
options,
PreviewDatafeedResponse::fromXContent,
listener,
Collections.emptySet());
}
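
From the caller's side, the new delete behaviour looks roughly like this (job id made up, `client` assumed to be a `RestHighLevelClient`):

DeleteJobRequest request = new DeleteJobRequest("my-job");
request.setWaitForCompletion(false); // return a task id instead of blocking until the deletion finishes
DeleteJobResponse response = client.machineLearning().deleteJob(request, RequestOptions.DEFAULT);
// the response carries either the acknowledgement or the task id, depending on wait_for_completion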
/**
* Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job}
* <p>

View File

@ -980,9 +980,11 @@ final class RequestConverters {
return this;
}
EndpointBuilder addPathPartAsIs(String part) {
if (Strings.hasLength(part)) {
joiner.add(part);
EndpointBuilder addPathPartAsIs(String ... parts) {
for (String part : parts) {
if (Strings.hasLength(part)) {
joiner.add(part);
}
}
return this;
}
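
The varargs overload keeps call sites flat; a before/after sketch:

// before: one call per fixed path part
new EndpointBuilder().addPathPartAsIs("_xpack").addPathPartAsIs("license").addPathPartAsIs("start_basic");
// after: a single call, as the license converter added in this commit does
new EndpointBuilder().addPathPartAsIs("_xpack", "license", "start_basic");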

View File

@ -20,6 +20,8 @@
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
import org.elasticsearch.client.rollup.DeleteRollupJobResponse;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.GetRollupJobResponse;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
@ -76,6 +78,40 @@ public class RollupClient {
listener, Collections.emptySet());
}
/**
* Delete a rollup job from the cluster
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-delete-job.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public DeleteRollupJobResponse deleteRollupJob(DeleteRollupJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
RollupRequestConverters::deleteJob,
options,
DeleteRollupJobResponse::fromXContent,
Collections.emptySet());
}
/**
* Asynchronously delete a rollup job from the cluster
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-delete-job.html">
the docs</a> for details.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void deleteRollupJobAsync(DeleteRollupJobRequest request,
RequestOptions options,
ActionListener<DeleteRollupJobResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
RollupRequestConverters::deleteJob,
options,
DeleteRollupJobResponse::fromXContent,
listener, Collections.emptySet());
}
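
A usage sketch, assuming `DeleteRollupJobRequest` is constructed from the job id (only its getter appears in this diff) and that the high level client exposes the rollup client as `rollup()`:

DeleteRollupJobRequest request = new DeleteRollupJobRequest("my-rollup-job"); // hypothetical constructor
DeleteRollupJobResponse response = client.rollup().deleteRollupJob(request, RequestOptions.DEFAULT);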
/**
* Get a rollup job from the cluster.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">

View File

@ -18,8 +18,10 @@
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
@ -54,4 +56,16 @@ final class RollupRequestConverters {
.build();
return new Request(HttpGet.METHOD_NAME, endpoint);
}
static Request deleteJob(final DeleteRollupJobRequest deleteRollupJobRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("rollup")
.addPathPartAsIs("job")
.addPathPart(deleteRollupJobRequest.getId())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
request.setEntity(createEntity(deleteRollupJobRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
}

View File

@ -20,8 +20,12 @@
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.security.PutRoleMappingRequest;
import org.elasticsearch.client.security.PutRoleMappingResponse;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.GetSslCertificatesRequest;
import org.elasticsearch.client.security.GetSslCertificatesResponse;
import org.elasticsearch.client.security.PutUserRequest;
import org.elasticsearch.client.security.PutUserResponse;
import org.elasticsearch.client.security.EmptyResponse;
@ -73,6 +77,34 @@ public final class SecurityClient {
PutUserResponse::fromXContent, listener, emptySet());
}
/**
* Create/Update a role mapping.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role-mapping.html">
* the docs</a> for more.
* @param request the request with the role mapping information
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response from the put role mapping call
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public PutRoleMappingResponse putRoleMapping(final PutRoleMappingRequest request, final RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::putRoleMapping, options,
PutRoleMappingResponse::fromXContent, emptySet());
}
/**
* Asynchronously create/update a role mapping.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role-mapping.html">
* the docs</a> for more.
* @param request the request with the role mapping information
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void putRoleMappingAsync(final PutRoleMappingRequest request, final RequestOptions options,
final ActionListener<PutRoleMappingResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::putRoleMapping, options,
PutRoleMappingResponse::fromXContent, listener, emptySet());
}
/**
* Enable a native realm or built-in user synchronously.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html">
@ -133,6 +165,33 @@ public final class SecurityClient {
EmptyResponse::fromXContent, listener, emptySet());
}
/**
* Synchronously retrieve the X.509 certificates that are used to encrypt communications in an Elasticsearch cluster.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-ssl.html">
* the docs</a> for more.
*
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response from the get certificates call
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public GetSslCertificatesResponse getSslCertificates(RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(GetSslCertificatesRequest.INSTANCE, GetSslCertificatesRequest::getRequest,
options, GetSslCertificatesResponse::fromXContent, emptySet());
}
/**
* Asynchronously retrieve the X.509 certificates that are used to encrypt communications in an Elasticsearch cluster.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-ssl.html">
* the docs</a> for more.
*
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void getSslCertificatesAsync(RequestOptions options, ActionListener<GetSslCertificatesResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(GetSslCertificatesRequest.INSTANCE, GetSslCertificatesRequest::getRequest,
options, GetSslCertificatesResponse::fromXContent, listener, emptySet());
}
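
Since the request object is a fixed singleton, the call takes only options. A sketch, assuming `client` is a `RestHighLevelClient`:

GetSslCertificatesResponse response = client.security().getSslCertificates(RequestOptions.DEFAULT);
// no body or parameters: the API simply lists the certificates currently in use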
/**
* Change the password of a user of a native realm or built-in user synchronously.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-change-password.html">

View File

@ -21,6 +21,7 @@ package org.elasticsearch.client;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.security.PutRoleMappingRequest;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.ChangePasswordRequest;
@ -61,6 +62,18 @@ final class SecurityRequestConverters {
return request;
}
static Request putRoleMapping(final PutRoleMappingRequest putRoleMappingRequest) throws IOException {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_xpack/security/role_mapping")
.addPathPart(putRoleMappingRequest.getName())
.build();
final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putRoleMappingRequest, REQUEST_BODY_CONTENT_TYPE));
final RequestConverters.Params params = new RequestConverters.Params(request);
params.withRefreshPolicy(putRoleMappingRequest.getRefreshPolicy());
return request;
}
static Request enableUser(EnableUserRequest enableUserRequest) {
return setUserEnabled(enableUserRequest);
}

View File

@ -20,6 +20,8 @@ package org.elasticsearch.client;
import org.elasticsearch.common.unit.TimeValue;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
/**
* A base request for any requests that supply timeouts.
*
@ -28,8 +30,11 @@ import org.elasticsearch.common.unit.TimeValue;
*/
public class TimedRequest implements Validatable {
private TimeValue timeout;
private TimeValue masterTimeout;
public static final TimeValue DEFAULT_ACK_TIMEOUT = timeValueSeconds(30);
public static final TimeValue DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30);
private TimeValue timeout = DEFAULT_ACK_TIMEOUT;
private TimeValue masterTimeout = DEFAULT_MASTER_NODE_TIMEOUT;
public void setTimeout(TimeValue timeout) {
this.timeout = timeout;
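
The net effect is that every `TimedRequest` subclass now starts with 30 second timeouts instead of null values. A sketch using the `StartBasicRequest` subclass added in this commit (its `timeout()` and `masterNodeTimeout()` getters are exercised by the license converter above):

StartBasicRequest request = new StartBasicRequest();
assert TimedRequest.DEFAULT_ACK_TIMEOUT.equals(request.timeout());               // 30s out of the box
assert TimedRequest.DEFAULT_MASTER_NODE_TIMEOUT.equals(request.masterNodeTimeout());
request.setTimeout(TimeValue.timeValueSeconds(10)); // still overridable per request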

View File

@ -19,6 +19,8 @@
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchResponse;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.client.watcher.AckWatchResponse;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
@ -121,4 +123,31 @@ public final class WatcherClient {
AckWatchResponse::fromXContent, listener, emptySet());
}
/**
* Activate a watch in the cluster
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-activate-watch.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public ActivateWatchResponse activateWatch(ActivateWatchRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::activateWatch, options,
ActivateWatchResponse::fromXContent, singleton(404));
}
/**
* Asynchronously activates a watch in the cluster
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-activate-watch.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void activateWatchAsync(ActivateWatchRequest request, RequestOptions options, ActionListener<ActivateWatchResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::activateWatch, options,
ActivateWatchResponse::fromXContent, listener, singleton(404));
}
}
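
A usage sketch, assuming `ActivateWatchRequest` is constructed from the watch id (only its getter appears in this diff) and `client` is a `RestHighLevelClient`:

ActivateWatchRequest request = new ActivateWatchRequest("my-watch"); // hypothetical constructor
ActivateWatchResponse response = client.watcher().activateWatch(request, RequestOptions.DEFAULT);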

View File

@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
@ -73,4 +74,16 @@ public class WatcherRequestConverters {
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
return request;
}
static Request activateWatch(ActivateWatchRequest activateWatchRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("watcher")
.addPathPartAsIs("watch")
.addPathPart(activateWatchRequest.getWatchId())
.addPathPartAsIs("_activate")
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
return request;
}
}

View File

@ -0,0 +1,38 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.license;
import org.elasticsearch.client.TimedRequest;
public class StartBasicRequest extends TimedRequest {
private final boolean acknowledge;
public StartBasicRequest() {
this(false);
}
public StartBasicRequest(boolean acknowledge) {
this.acknowledge = acknowledge;
}
public boolean isAcknowledge() {
return acknowledge;
}
}

View File

@ -0,0 +1,168 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.license;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
public class StartBasicResponse {
private static final ConstructingObjectParser<StartBasicResponse, Void> PARSER = new ConstructingObjectParser<>(
"start_basic_response", true, (a, v) -> {
boolean basicWasStarted = (Boolean) a[0];
String errorMessage = (String) a[1];
if (basicWasStarted) {
return new StartBasicResponse(StartBasicResponse.Status.GENERATED_BASIC);
}
StartBasicResponse.Status status = StartBasicResponse.Status.fromErrorMessage(errorMessage);
@SuppressWarnings("unchecked") Tuple<String, Map<String, String[]>> acknowledgements = (Tuple<String, Map<String, String[]>>) a[2];
return new StartBasicResponse(status, acknowledgements.v2(), acknowledgements.v1());
});
static {
PARSER.declareBoolean(constructorArg(), new ParseField("basic_was_started"));
PARSER.declareString(optionalConstructorArg(), new ParseField("error_message"));
PARSER.declareObject(optionalConstructorArg(), (parser, v) -> {
Map<String, String[]> acknowledgeMessages = new HashMap<>();
String message = null;
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (currentFieldName == null) {
throw new XContentParseException(parser.getTokenLocation(), "expected message header or acknowledgement");
}
if (new ParseField("message").getPreferredName().equals(currentFieldName)) {
ensureExpectedToken(XContentParser.Token.VALUE_STRING, token, parser::getTokenLocation);
message = parser.text();
} else {
if (token != XContentParser.Token.START_ARRAY) {
throw new XContentParseException(parser.getTokenLocation(), "unexpected acknowledgement type");
}
List<String> acknowledgeMessagesList = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
ensureExpectedToken(XContentParser.Token.VALUE_STRING, token, parser::getTokenLocation);
acknowledgeMessagesList.add(parser.text());
}
acknowledgeMessages.put(currentFieldName, acknowledgeMessagesList.toArray(new String[0]));
}
}
}
return new Tuple<>(message, acknowledgeMessages);
},
new ParseField("acknowledge"));
}
private Map<String, String[]> acknowledgeMessages;
private String acknowledgeMessage;
enum Status {
GENERATED_BASIC(true, null, RestStatus.OK),
ALREADY_USING_BASIC(false, "Operation failed: Current license is basic.", RestStatus.FORBIDDEN),
NEED_ACKNOWLEDGEMENT(false, "Operation failed: Needs acknowledgement.", RestStatus.OK);
private final boolean isBasicStarted;
private final String errorMessage;
private final RestStatus restStatus;
Status(boolean isBasicStarted, String errorMessage, RestStatus restStatus) {
this.isBasicStarted = isBasicStarted;
this.errorMessage = errorMessage;
this.restStatus = restStatus;
}
String getErrorMessage() {
return errorMessage;
}
boolean isBasicStarted() {
return isBasicStarted;
}
static StartBasicResponse.Status fromErrorMessage(final String errorMessage) {
final StartBasicResponse.Status[] values = StartBasicResponse.Status.values();
for (StartBasicResponse.Status status : values) {
if (Objects.equals(status.errorMessage, errorMessage)) {
return status;
}
}
throw new IllegalArgumentException("No status for error message ['" + errorMessage + "']");
}
}
private StartBasicResponse.Status status;
public StartBasicResponse() {
}
StartBasicResponse(StartBasicResponse.Status status) {
this(status, Collections.emptyMap(), null);
}
StartBasicResponse(StartBasicResponse.Status status,
Map<String, String[]> acknowledgeMessages, String acknowledgeMessage) {
this.status = status;
this.acknowledgeMessages = acknowledgeMessages;
this.acknowledgeMessage = acknowledgeMessage;
}
public boolean isAcknowledged() {
return status != StartBasicResponse.Status.NEED_ACKNOWLEDGEMENT;
}
public boolean isBasicStarted() {
return status.isBasicStarted;
}
public String getErrorMessage() {
return status.errorMessage;
}
public String getAcknowledgeMessage() {
return acknowledgeMessage;
}
public Map<String, String[]> getAcknowledgeMessages() {
return acknowledgeMessages;
}
public static StartBasicResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
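
For reference, the `acknowledge` object this parser walks has roughly the following shape; the `message` field is the header it extracts, and every other key maps a feature name to an array of messages (values here are illustrative):

"acknowledge": {
  "message": "This license update requires acknowledgement.",
  "watcher": ["Watcher will be disabled"]
}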

View File

@ -29,7 +29,8 @@ import java.util.Objects;
public class DeleteJobRequest extends ActionRequest {
private String jobId;
private boolean force;
private Boolean force;
private Boolean waitForCompletion;
public DeleteJobRequest(String jobId) {
this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
@@ -47,7 +48,7 @@ public class DeleteJobRequest extends ActionRequest {
this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
}
public boolean isForce() {
public Boolean getForce() {
return force;
}
@@ -57,10 +58,24 @@ public class DeleteJobRequest extends ActionRequest {
*
* @param force When {@code true} forcefully delete an opened job. Defaults to {@code false}
*/
public void setForce(boolean force) {
public void setForce(Boolean force) {
this.force = force;
}
public Boolean getWaitForCompletion() {
return waitForCompletion;
}
/**
* Set whether this request should wait until the operation has completed before returning
* @param waitForCompletion When {@code true} the call will wait for the job deletion to complete.
* Otherwise, the deletion will be executed asynchronously and the response
* will contain the task id.
*/
public void setWaitForCompletion(Boolean waitForCompletion) {
this.waitForCompletion = waitForCompletion;
}
@Override
public ActionRequestValidationException validate() {
return null;

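For illustration, a short usage sketch of the request above; the job id is invented:

    DeleteJobRequest request = new DeleteJobRequest("my-job"); // hypothetical job id
    request.setForce(true);               // delete the job even if it is currently opened
    request.setWaitForCompletion(false);  // return a task id instead of blocking until deletion finishes
    // Both flags are Boolean: leaving them null defers to the server-side defaults.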
View File

@@ -0,0 +1,113 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.tasks.TaskId;
import java.io.IOException;
import java.util.Objects;
/**
* Response object that contains the acknowledgement or the task id
* depending on whether the delete job action was requested to wait for completion.
*/
public class DeleteJobResponse extends ActionResponse implements ToXContentObject {
private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
private static final ParseField TASK = new ParseField("task");
public static final ConstructingObjectParser<DeleteJobResponse, Void> PARSER = new ConstructingObjectParser<>("delete_job_response",
true, a -> new DeleteJobResponse((Boolean) a[0], (TaskId) a[1]));
static {
PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ACKNOWLEDGED);
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), TaskId.parser(), TASK, ObjectParser.ValueType.STRING);
}
public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private final Boolean acknowledged;
private final TaskId task;
DeleteJobResponse(@Nullable Boolean acknowledged, @Nullable TaskId task) {
assert acknowledged != null || task != null;
this.acknowledged = acknowledged;
this.task = task;
}
/**
* Get the action acknowledgement
* @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code false};
* otherwise a {@code boolean} that indicates whether the job was deleted successfully.
*/
public Boolean getAcknowledged() {
return acknowledged;
}
/**
* Get the task id
* @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code true};
* otherwise the id of the job deletion task.
*/
public TaskId getTask() {
return task;
}
@Override
public int hashCode() {
return Objects.hash(acknowledged, task);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
DeleteJobResponse that = (DeleteJobResponse) other;
return Objects.equals(acknowledged, that.acknowledged) && Objects.equals(task, that.task);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (acknowledged != null) {
builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged);
}
if (task != null) {
builder.field(TASK.getPreferredName(), task.toString());
}
builder.endObject();
return builder;
}
}
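
A minimal parsing sketch for the two mutually exclusive shapes; the JSON bodies and the task id are invented for illustration:

    // Shape returned when the request waited for completion.
    String waited = "{\"acknowledged\": true}";
    // Shape returned for an asynchronous delete; a TaskId renders as nodeId:taskNumber.
    String async = "{\"task\": \"node-1:42\"}";
    try (XContentParser parser = XContentType.JSON.xContent()
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, waited)) {
        DeleteJobResponse response = DeleteJobResponse.fromXContent(parser);
        // response.getAcknowledged() == Boolean.TRUE and response.getTask() == null
    }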

View File

@@ -0,0 +1,147 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
* Request object to get {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} by their respective datafeedIds
*
* {@code _all} explicitly gets all the datafeeds' statistics in the cluster
* An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds' statistics in the cluster
*/
public class GetDatafeedStatsRequest extends ActionRequest implements ToXContentObject {
public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds");
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<GetDatafeedStatsRequest, Void> PARSER = new ConstructingObjectParser<>(
"get_datafeed_stats_request", a -> new GetDatafeedStatsRequest((List<String>) a[0]));
static {
PARSER.declareField(ConstructingObjectParser.constructorArg(),
p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
DatafeedConfig.ID, ObjectParser.ValueType.STRING_ARRAY);
PARSER.declareBoolean(GetDatafeedStatsRequest::setAllowNoDatafeeds, ALLOW_NO_DATAFEEDS);
}
private static final String ALL_DATAFEEDS = "_all";
private final List<String> datafeedIds;
private Boolean allowNoDatafeeds;
/**
* Explicitly gets all datafeeds statistics
*
* @return a {@link GetDatafeedStatsRequest} for all existing datafeeds
*/
public static GetDatafeedStatsRequest getAllDatafeedStatsRequest() {
return new GetDatafeedStatsRequest(ALL_DATAFEEDS);
}
GetDatafeedStatsRequest(List<String> datafeedIds) {
if (datafeedIds.stream().anyMatch(Objects::isNull)) {
throw new NullPointerException("datafeedIds must not contain null values");
}
this.datafeedIds = new ArrayList<>(datafeedIds);
}
/**
* Get statistics for the specified datafeeds via their unique datafeedIds
*
* @param datafeedIds must be non-null and each datafeedId must be non-null
*/
public GetDatafeedStatsRequest(String... datafeedIds) {
this(Arrays.asList(datafeedIds));
}
/**
* All the datafeedIds for which to get statistics
*/
public List<String> getDatafeedIds() {
return datafeedIds;
}
public Boolean isAllowNoDatafeeds() {
return this.allowNoDatafeeds;
}
/**
* Whether to ignore if a wildcard expression matches no datafeeds.
*
* This includes the {@code _all} string and the case where no datafeeds have been specified
*
* @param allowNoDatafeeds When {@code true} ignore if wildcard or {@code _all} matches no datafeeds. Defaults to {@code true}
*/
public void setAllowNoDatafeeds(boolean allowNoDatafeeds) {
this.allowNoDatafeeds = allowNoDatafeeds;
}
@Override
public int hashCode() {
return Objects.hash(datafeedIds, allowNoDatafeeds);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
GetDatafeedStatsRequest that = (GetDatafeedStatsRequest) other;
return Objects.equals(datafeedIds, that.datafeedIds) &&
Objects.equals(allowNoDatafeeds, that.allowNoDatafeeds);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds));
if (allowNoDatafeeds != null) {
builder.field(ALLOW_NO_DATAFEEDS.getPreferredName(), allowNoDatafeeds);
}
builder.endObject();
return builder;
}
}
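
A brief usage sketch; the datafeed ids are invented:

    // Statistics for two specific datafeeds; tolerate wildcards that match nothing.
    GetDatafeedStatsRequest request = new GetDatafeedStatsRequest("datafeed-1", "datafeed-2");
    request.setAllowNoDatafeeds(true);

    // Or explicitly request statistics for every datafeed in the cluster.
    GetDatafeedStatsRequest all = GetDatafeedStatsRequest.getAllDatafeedStatsRequest();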

View File

@@ -0,0 +1,89 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
* Contains a {@link List} of the found {@link DatafeedStats} objects and the total count found
*/
public class GetDatafeedStatsResponse extends AbstractResultResponse<DatafeedStats> {
public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<GetDatafeedStatsResponse, Void> PARSER =
new ConstructingObjectParser<>("get_datafeed_stats_response",
true,
a -> new GetDatafeedStatsResponse((List<DatafeedStats>) a[0], (long) a[1]));
static {
PARSER.declareObjectArray(constructorArg(), DatafeedStats.PARSER, RESULTS_FIELD);
PARSER.declareLong(constructorArg(), COUNT);
}
GetDatafeedStatsResponse(List<DatafeedStats> results, long count) {
super(RESULTS_FIELD, results, count);
}
/**
* The collection of {@link DatafeedStats} objects found in the query
*/
public List<DatafeedStats> datafeedStats() {
return results;
}
public static GetDatafeedStatsResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public int hashCode() {
return Objects.hash(results, count);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
GetDatafeedStatsResponse other = (GetDatafeedStatsResponse) obj;
return Objects.equals(results, other.results) && count == other.count;
}
@Override
public final String toString() {
return Strings.toString(this);
}
}

View File

@@ -0,0 +1,100 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
* Request to preview a machine learning datafeed
*/
public class PreviewDatafeedRequest extends ActionRequest implements ToXContentObject {
public static final ConstructingObjectParser<PreviewDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>(
"open_datafeed_request", true, a -> new PreviewDatafeedRequest((String) a[0]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID);
}
public static PreviewDatafeedRequest fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private final String datafeedId;
/**
* Create a new request with the desired datafeedId
*
* @param datafeedId unique datafeedId, must not be null
*/
public PreviewDatafeedRequest(String datafeedId) {
this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null");
}
public String getDatafeedId() {
return datafeedId;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
builder.endObject();
return builder;
}
@Override
public String toString() {
return Strings.toString(this);
}
@Override
public int hashCode() {
return Objects.hash(datafeedId);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
PreviewDatafeedRequest that = (PreviewDatafeedRequest) other;
return Objects.equals(datafeedId, that.datafeedId);
}
}

View File

@@ -0,0 +1,113 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* Response containing a datafeed preview in JSON format
*/
public class PreviewDatafeedResponse extends ActionResponse implements ToXContentObject {
private BytesReference preview;
public static PreviewDatafeedResponse fromXContent(XContentParser parser) throws IOException {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
parser.nextToken();
builder.copyCurrentStructure(parser);
return new PreviewDatafeedResponse(BytesReference.bytes(builder));
}
}
public PreviewDatafeedResponse(BytesReference preview) {
this.preview = preview;
}
public BytesReference getPreview() {
return preview;
}
/**
* Parses the preview to a list of {@link Map} objects
* @return List of previewed data
* @throws IOException If there is a parsing issue with the {@link BytesReference}
* @throws java.lang.ClassCastException If casting the raw {@link Object} entries to a {@link Map} fails
*/
@SuppressWarnings("unchecked")
public List<Map<String, Object>> getDataList() throws IOException {
try (StreamInput streamInput = preview.streamInput();
XContentParser parser = XContentType.JSON.xContent()
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, streamInput)) {
XContentParser.Token token = parser.nextToken();
if (token == XContentParser.Token.START_ARRAY) {
return parser.listOrderedMap().stream().map(obj -> (Map<String, Object>) obj).collect(Collectors.toList());
} else {
return Collections.singletonList(parser.mapOrdered());
}
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
try (InputStream stream = preview.streamInput()) {
builder.rawValue(stream, XContentType.JSON);
}
return builder;
}
@Override
public int hashCode() {
return Objects.hash(preview);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
PreviewDatafeedResponse other = (PreviewDatafeedResponse) obj;
return Objects.equals(preview, other.preview);
}
@Override
public final String toString() {
return Strings.toString(this);
}
}
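
For illustration, a sketch of getDataList() against a hand-written preview payload; the documents are invented, and BytesArray is assumed as the concrete BytesReference:

    String preview = "[{\"time\": 1541587919000, \"airline\": \"AAL\"},"
            + "{\"time\": 1541587920000, \"airline\": \"JZA\"}]";
    PreviewDatafeedResponse response = new PreviewDatafeedResponse(new BytesArray(preview));
    List<Map<String, Object>> rows = response.getDataList();
    // rows.size() == 2; each entry is one previewed document as an ordered map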

View File

@@ -0,0 +1,42 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.datafeed;
import org.elasticsearch.common.ParseField;
import java.util.Locale;
/**
* Datafeed State POJO
*/
public enum DatafeedState {
STARTED, STOPPED, STARTING, STOPPING;
public static final ParseField STATE = new ParseField("state");
public static DatafeedState fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT));
}
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
}
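
The enum round-trips through lowercase strings, for example:

    DatafeedState state = DatafeedState.fromString(" Started "); // trimmed and case-insensitive
    // state == DatafeedState.STARTED and state.toString() equals "started"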

View File

@@ -0,0 +1,136 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.datafeed;
import org.elasticsearch.client.ml.NodeAttributes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
/**
* Datafeed Statistics POJO
*/
public class DatafeedStats implements ToXContentObject {
private final String datafeedId;
private final DatafeedState datafeedState;
@Nullable
private final NodeAttributes node;
@Nullable
private final String assignmentExplanation;
public static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
public static final ParseField NODE = new ParseField("node");
public static final ConstructingObjectParser<DatafeedStats, Void> PARSER = new ConstructingObjectParser<>("datafeed_stats",
true,
a -> {
String datafeedId = (String)a[0];
DatafeedState datafeedState = DatafeedState.fromString((String)a[1]);
NodeAttributes nodeAttributes = (NodeAttributes)a[2];
String assignmentExplanation = (String)a[3];
return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentExplanation);
});
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedState.STATE);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE);
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
}
public DatafeedStats(String datafeedId, DatafeedState datafeedState, @Nullable NodeAttributes node,
@Nullable String assignmentExplanation) {
this.datafeedId = Objects.requireNonNull(datafeedId);
this.datafeedState = Objects.requireNonNull(datafeedState);
this.node = node;
this.assignmentExplanation = assignmentExplanation;
}
public String getDatafeedId() {
return datafeedId;
}
public DatafeedState getDatafeedState() {
return datafeedState;
}
public NodeAttributes getNode() {
return node;
}
public String getAssignmentExplanation() {
return assignmentExplanation;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
builder.field(DatafeedState.STATE.getPreferredName(), datafeedState.toString());
if (node != null) {
builder.startObject("node");
builder.field("id", node.getId());
builder.field("name", node.getName());
builder.field("ephemeral_id", node.getEphemeralId());
builder.field("transport_address", node.getTransportAddress());
builder.startObject("attributes");
for (Map.Entry<String, String> entry : node.getAttributes().entrySet()) {
if (entry.getKey().startsWith("ml.")) {
builder.field(entry.getKey(), entry.getValue());
}
}
builder.endObject();
builder.endObject();
}
if (assignmentExplanation != null) {
builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation);
}
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(datafeedId, datafeedState.toString(), node, assignmentExplanation);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
DatafeedStats other = (DatafeedStats) obj;
return Objects.equals(datafeedId, other.datafeedId) &&
Objects.equals(this.datafeedState, other.datafeedState) &&
Objects.equals(this.node, other.node) &&
Objects.equals(this.assignmentExplanation, other.assignmentExplanation);
}
}

View File

@@ -215,7 +215,7 @@ public class Detector implements ToXContentObject {
}
/**
* Excludes frequently-occuring metrics from the analysis;
* Excludes frequently-occurring metrics from the analysis;
* can apply to 'by' field, 'over' field, or both
*
* @return the value that the user set

View File

@@ -57,7 +57,6 @@ public class Job implements ToXContentObject {
public static final ParseField DATA_DESCRIPTION = new ParseField("data_description");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField FINISHED_TIME = new ParseField("finished_time");
public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time");
public static final ParseField ESTABLISHED_MODEL_MEMORY = new ParseField("established_model_memory");
public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config");
public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days");
@@ -66,6 +65,7 @@ public class Job implements ToXContentObject {
public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days");
public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id");
public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name");
public static final ParseField DELETING = new ParseField("deleting");
public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("job_details", true, Builder::new);
@@ -82,10 +82,6 @@ public class Job implements ToXContentObject {
(p) -> TimeUtil.parseTimeField(p, FINISHED_TIME.getPreferredName()),
FINISHED_TIME,
ValueType.VALUE);
PARSER.declareField(Builder::setLastDataTime,
(p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()),
LAST_DATA_TIME,
ValueType.VALUE);
PARSER.declareLong(Builder::setEstablishedModelMemory, ESTABLISHED_MODEL_MEMORY);
PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG);
PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS);
@@ -99,6 +95,7 @@ public class Job implements ToXContentObject {
PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT);
PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID);
PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME);
PARSER.declareBoolean(Builder::setDeleting, DELETING);
}
private final String jobId;
@@ -108,7 +105,6 @@ public class Job implements ToXContentObject {
private final String description;
private final Date createTime;
private final Date finishedTime;
private final Date lastDataTime;
private final Long establishedModelMemory;
private final AnalysisConfig analysisConfig;
private final AnalysisLimits analysisLimits;
@@ -121,13 +117,14 @@ public class Job implements ToXContentObject {
private final Map<String, Object> customSettings;
private final String modelSnapshotId;
private final String resultsIndexName;
private final Boolean deleting;
private Job(String jobId, String jobType, List<String> groups, String description, Date createTime,
Date finishedTime, Date lastDataTime, Long establishedModelMemory,
private Job(String jobId, String jobType, List<String> groups, String description,
Date createTime, Date finishedTime, Long establishedModelMemory,
AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval,
Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map<String, Object> customSettings,
String modelSnapshotId, String resultsIndexName) {
String modelSnapshotId, String resultsIndexName, Boolean deleting) {
this.jobId = jobId;
this.jobType = jobType;
@@ -135,7 +132,6 @@ public class Job implements ToXContentObject {
this.description = description;
this.createTime = createTime;
this.finishedTime = finishedTime;
this.lastDataTime = lastDataTime;
this.establishedModelMemory = establishedModelMemory;
this.analysisConfig = analysisConfig;
this.analysisLimits = analysisLimits;
@@ -148,6 +144,7 @@ public class Job implements ToXContentObject {
this.customSettings = customSettings == null ? null : Collections.unmodifiableMap(customSettings);
this.modelSnapshotId = modelSnapshotId;
this.resultsIndexName = resultsIndexName;
this.deleting = deleting;
}
/**
@@ -205,16 +202,6 @@ public class Job implements ToXContentObject {
return finishedTime;
}
/**
* The last time data was uploaded to the job or <code>null</code> if no
* data has been seen.
*
* @return The date at which the last data was processed
*/
public Date getLastDataTime() {
return lastDataTime;
}
/**
* The established model memory of the job, or <code>null</code> if model
* memory has not reached equilibrium yet.
@@ -292,6 +279,10 @@ public class Job implements ToXContentObject {
return modelSnapshotId;
}
public Boolean getDeleting() {
return deleting;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
@@ -313,10 +304,6 @@ public class Job implements ToXContentObject {
builder.timeField(FINISHED_TIME.getPreferredName(), FINISHED_TIME.getPreferredName() + humanReadableSuffix,
finishedTime.getTime());
}
if (lastDataTime != null) {
builder.timeField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + humanReadableSuffix,
lastDataTime.getTime());
}
if (establishedModelMemory != null) {
builder.field(ESTABLISHED_MODEL_MEMORY.getPreferredName(), establishedModelMemory);
}
@@ -351,6 +338,9 @@ public class Job implements ToXContentObject {
if (resultsIndexName != null) {
builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName);
}
if (deleting != null) {
builder.field(DELETING.getPreferredName(), deleting);
}
builder.endObject();
return builder;
}
@@ -372,7 +362,6 @@ public class Job implements ToXContentObject {
&& Objects.equals(this.description, that.description)
&& Objects.equals(this.createTime, that.createTime)
&& Objects.equals(this.finishedTime, that.finishedTime)
&& Objects.equals(this.lastDataTime, that.lastDataTime)
&& Objects.equals(this.establishedModelMemory, that.establishedModelMemory)
&& Objects.equals(this.analysisConfig, that.analysisConfig)
&& Objects.equals(this.analysisLimits, that.analysisLimits)
@@ -384,15 +373,16 @@ public class Job implements ToXContentObject {
&& Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays)
&& Objects.equals(this.customSettings, that.customSettings)
&& Objects.equals(this.modelSnapshotId, that.modelSnapshotId)
&& Objects.equals(this.resultsIndexName, that.resultsIndexName);
&& Objects.equals(this.resultsIndexName, that.resultsIndexName)
&& Objects.equals(this.deleting, that.deleting);
}
@Override
public int hashCode() {
return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory,
return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, establishedModelMemory,
analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
modelSnapshotId, resultsIndexName);
modelSnapshotId, resultsIndexName, deleting);
}
@Override
@@ -415,7 +405,6 @@ public class Job implements ToXContentObject {
private DataDescription dataDescription;
private Date createTime;
private Date finishedTime;
private Date lastDataTime;
private Long establishedModelMemory;
private ModelPlotConfig modelPlotConfig;
private Long renormalizationWindowDays;
@@ -425,6 +414,7 @@ public class Job implements ToXContentObject {
private Map<String, Object> customSettings;
private String modelSnapshotId;
private String resultsIndexName;
private Boolean deleting;
private Builder() {
}
@@ -443,7 +433,6 @@ public class Job implements ToXContentObject {
this.dataDescription = job.getDataDescription();
this.createTime = job.getCreateTime();
this.finishedTime = job.getFinishedTime();
this.lastDataTime = job.getLastDataTime();
this.establishedModelMemory = job.getEstablishedModelMemory();
this.modelPlotConfig = job.getModelPlotConfig();
this.renormalizationWindowDays = job.getRenormalizationWindowDays();
@@ -453,6 +442,7 @@ public class Job implements ToXContentObject {
this.customSettings = job.getCustomSettings();
this.modelSnapshotId = job.getModelSnapshotId();
this.resultsIndexName = job.getResultsIndexNameNoPrefix();
this.deleting = job.getDeleting();
}
public Builder setId(String id) {
@@ -504,16 +494,6 @@ public class Job implements ToXContentObject {
return this;
}
/**
* Set the wall clock time of the last data upload
*
* @param lastDataTime Wall clock time
*/
public Builder setLastDataTime(Date lastDataTime) {
this.lastDataTime = lastDataTime;
return this;
}
public Builder setEstablishedModelMemory(Long establishedModelMemory) {
this.establishedModelMemory = establishedModelMemory;
return this;
@@ -559,6 +539,11 @@ public class Job implements ToXContentObject {
return this;
}
Builder setDeleting(Boolean deleting) {
this.deleting = deleting;
return this;
}
/**
* Builds a job.
*
@@ -568,10 +553,10 @@ public class Job implements ToXContentObject {
Objects.requireNonNull(id, "[" + ID.getPreferredName() + "] must not be null");
Objects.requireNonNull(jobType, "[" + JOB_TYPE.getPreferredName() + "] must not be null");
return new Job(
id, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory,
id, jobType, groups, description, createTime, finishedTime, establishedModelMemory,
analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
modelSnapshotId, resultsIndexName);
modelSnapshotId, resultsIndexName, deleting);
}
}
}

View File

@@ -48,6 +48,7 @@ public class AnomalyRecord implements ToXContentObject {
* Result fields (all detector types)
*/
public static final ParseField PROBABILITY = new ParseField("probability");
public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact");
public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
@@ -94,6 +95,7 @@ public class AnomalyRecord implements ToXContentObject {
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
PARSER.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT);
PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);
PARSER.declareInt(AnomalyRecord::setDetectorIndex, DETECTOR_INDEX);
@@ -117,6 +119,7 @@ public class AnomalyRecord implements ToXContentObject {
private final String jobId;
private int detectorIndex;
private double probability;
private Double multiBucketImpact;
private String byFieldName;
private String byFieldValue;
private String correlatedByFieldValue;
@@ -155,6 +158,9 @@ public class AnomalyRecord implements ToXContentObject {
builder.field(Job.ID.getPreferredName(), jobId);
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
builder.field(PROBABILITY.getPreferredName(), probability);
if (multiBucketImpact != null) {
builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact);
}
builder.field(RECORD_SCORE.getPreferredName(), recordScore);
builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
@@ -254,6 +260,14 @@ public class AnomalyRecord implements ToXContentObject {
probability = value;
}
// Boxed deliberately: multi_bucket_impact is optional and may be absent, in which case this returns null.
public Double getMultiBucketImpact() {
return multiBucketImpact;
}
void setMultiBucketImpact(double value) {
multiBucketImpact = value;
}
public String getByFieldName() {
return byFieldName;
}
@@ -376,7 +390,7 @@ public class AnomalyRecord implements ToXContentObject {
@Override
public int hashCode() {
return Objects.hash(jobId, detectorIndex, bucketSpan, probability, recordScore,
return Objects.hash(jobId, detectorIndex, bucketSpan, probability, multiBucketImpact, recordScore,
initialRecordScore, typical, actual,function, functionDescription, fieldName,
byFieldName, byFieldValue, correlatedByFieldValue, partitionFieldName,
partitionFieldValue, overFieldName, overFieldValue, timestamp, isInterim,
@@ -399,6 +413,7 @@ public class AnomalyRecord implements ToXContentObject {
&& this.detectorIndex == that.detectorIndex
&& this.bucketSpan == that.bucketSpan
&& this.probability == that.probability
&& Objects.equals(this.multiBucketImpact, that.multiBucketImpact)
&& this.recordScore == that.recordScore
&& this.initialRecordScore == that.initialRecordScore
&& Objects.deepEquals(this.typical, that.typical)

View File

@@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.rollup;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
public abstract class AcknowledgedResponse implements ToXContentObject {
private final boolean acknowledged;
public AcknowledgedResponse(final boolean acknowledged) {
this.acknowledged = acknowledged;
}
public boolean isAcknowledged() {
return acknowledged;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final AcknowledgedResponse that = (AcknowledgedResponse) o;
return isAcknowledged() == that.isAcknowledged();
}
@Override
public int hashCode() {
return Objects.hash(acknowledged);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
{
builder.field("acknowledged", isAcknowledged());
}
builder.endObject();
return builder;
}
}

View File

@@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.rollup;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
public class DeleteRollupJobRequest implements Validatable, ToXContentObject {
private static final ParseField ID_FIELD = new ParseField("id");
private final String id;
public DeleteRollupJobRequest(String id) {
this.id = Objects.requireNonNull(id, "id parameter must not be null");
}
public String getId() {
return id;
}
private static final ConstructingObjectParser<DeleteRollupJobRequest, Void> PARSER =
new ConstructingObjectParser<>("request", a -> {
return new DeleteRollupJobRequest((String) a[0]);
});
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), ID_FIELD);
}
public static DeleteRollupJobRequest fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(ID_FIELD.getPreferredName(), this.id);
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DeleteRollupJobRequest that = (DeleteRollupJobRequest) o;
return Objects.equals(id, that.id);
}
@Override
public int hashCode() {
return Objects.hash(id);
}
}

View File

@@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.rollup;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
public class DeleteRollupJobResponse extends AcknowledgedResponse {
public DeleteRollupJobResponse(boolean acknowledged) {
super(acknowledged);
}
public static DeleteRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private static final ConstructingObjectParser<DeleteRollupJobResponse, Void> PARSER
= new ConstructingObjectParser<>("delete_rollup_job_response", true,
args -> new DeleteRollupJobResponse((boolean) args[0]));
static {
PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged"));
}
}

View File

@@ -20,52 +20,21 @@ package org.elasticsearch.client.rollup;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
public class PutRollupJobResponse implements ToXContentObject {
public class PutRollupJobResponse extends AcknowledgedResponse {
private final boolean acknowledged;
public PutRollupJobResponse(final boolean acknowledged) {
this.acknowledged = acknowledged;
public PutRollupJobResponse(boolean acknowledged) {
super(acknowledged);
}
public boolean isAcknowledged() {
return acknowledged;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final PutRollupJobResponse that = (PutRollupJobResponse) o;
return isAcknowledged() == that.isAcknowledged();
}
@Override
public int hashCode() {
return Objects.hash(acknowledged);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.field("acknowledged", isAcknowledged());
}
builder.endObject();
return builder;
public static PutRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private static final ConstructingObjectParser<PutRollupJobResponse, Void> PARSER
@@ -73,8 +42,4 @@ public class PutRollupJobResponse implements ToXContentObject {
static {
PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged"));
}
public static PutRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}

View File

@@ -0,0 +1,49 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
/**
* Request object to retrieve the X.509 certificates that are used to encrypt communications in an Elasticsearch cluster.
*/
public final class GetSslCertificatesRequest implements Validatable, ToXContentObject {
public static final GetSslCertificatesRequest INSTANCE = new GetSslCertificatesRequest();
private final Request request;
private GetSslCertificatesRequest() {
request = new Request(HttpGet.METHOD_NAME, "/_xpack/ssl/certificates");
}
public Request getRequest() {
return request;
}
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.startObject().endObject();
}
}

View File

@@ -0,0 +1,68 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.elasticsearch.client.security.support.CertificateInfo;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* Response object when retrieving the X.509 certificates that are used to encrypt communications in an Elasticsearch cluster.
* Returns a list of {@link CertificateInfo} objects describing each of the certificates.
*/
public final class GetSslCertificatesResponse {
private final List<CertificateInfo> certificates;
public GetSslCertificatesResponse(List<CertificateInfo> certificates) {
this.certificates = certificates;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final GetSslCertificatesResponse that = (GetSslCertificatesResponse) o;
return Objects.equals(this.certificates, that.certificates);
}
@Override
public int hashCode() {
return Objects.hash(certificates);
}
public static GetSslCertificatesResponse fromXContent(XContentParser parser) throws IOException {
List<CertificateInfo> certificates = new ArrayList<>();
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
certificates.add(CertificateInfo.PARSER.parse(parser, null));
}
return new GetSslCertificatesResponse(certificates);
}
public List<CertificateInfo> getCertificates() {
return certificates;
}
}
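
For illustration, a parsing sketch; the certificate values are invented, with field names matching the CertificateInfo parser shown further below:

    String json = "[{\"path\": \"certs/elastic-certificates.p12\", \"format\": \"PKCS12\","
            + " \"alias\": \"instance\", \"subject_dn\": \"CN=instance\","
            + " \"serial_number\": \"a1b2c3\", \"has_private_key\": true,"
            + " \"expiry\": \"2021-01-15T20:42:49.000Z\"}]";
    try (XContentParser parser = XContentType.JSON.xContent()
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        GetSslCertificatesResponse response = GetSslCertificatesResponse.fromXContent(parser);
        // response.getCertificates().size() == 1
    }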

View File

@@ -0,0 +1,124 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Request object to create or update a role mapping.
*/
public final class PutRoleMappingRequest implements Validatable, ToXContentObject {
private final String name;
private final boolean enabled;
private final List<String> roles;
private final RoleMapperExpression rules;
private final Map<String, Object> metadata;
private final RefreshPolicy refreshPolicy;
public PutRoleMappingRequest(final String name, final boolean enabled, final List<String> roles, final RoleMapperExpression rules,
@Nullable final Map<String, Object> metadata, @Nullable final RefreshPolicy refreshPolicy) {
if (Strings.hasText(name) == false) {
throw new IllegalArgumentException("role-mapping name is missing");
}
this.name = name;
this.enabled = enabled;
if (roles == null || roles.isEmpty()) {
throw new IllegalArgumentException("role-mapping roles are missing");
}
this.roles = Collections.unmodifiableList(roles);
this.rules = Objects.requireNonNull(rules, "role-mapping rules are missing");
this.metadata = (metadata == null) ? Collections.emptyMap() : metadata;
this.refreshPolicy = (refreshPolicy == null) ? RefreshPolicy.getDefault() : refreshPolicy;
}
public String getName() {
return name;
}
public boolean isEnabled() {
return enabled;
}
public List<String> getRoles() {
return roles;
}
public RoleMapperExpression getRules() {
return rules;
}
public Map<String, Object> getMetadata() {
return metadata;
}
public RefreshPolicy getRefreshPolicy() {
return refreshPolicy;
}
@Override
public int hashCode() {
return Objects.hash(name, enabled, refreshPolicy, roles, rules, metadata);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final PutRoleMappingRequest other = (PutRoleMappingRequest) obj;
return (enabled == other.enabled) &&
(refreshPolicy == other.refreshPolicy) &&
Objects.equals(name, other.name) &&
Objects.equals(roles, other.roles) &&
Objects.equals(rules, other.rules) &&
Objects.equals(metadata, other.metadata);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("enabled", enabled);
builder.field("roles", roles);
builder.field("rules", rules);
builder.field("metadata", metadata);
return builder.endObject();
}
}
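
A construction sketch; the mapping name and role are invented, and the FieldRoleMapperExpression factory is an assumption about the expressiondsl package imported above:

    RoleMapperExpression rules = FieldRoleMapperExpression.ofUsername("*@example.com"); // assumed factory
    PutRoleMappingRequest request = new PutRoleMappingRequest(
            "example-mapping",                       // must be non-blank
            true,                                    // enabled
            Collections.singletonList("superuser"),  // at least one role is required
            rules,
            null,                                    // metadata: null becomes an empty map
            null);                                   // refresh policy: null becomes RefreshPolicy.getDefault()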

View File

@@ -0,0 +1,77 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
* Response when adding/updating a role mapping. Returns a boolean field for
* whether the role mapping was created or updated.
*/
public final class PutRoleMappingResponse {
private final boolean created;
public PutRoleMappingResponse(boolean created) {
this.created = created;
}
public boolean isCreated() {
return created;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final PutRoleMappingResponse that = (PutRoleMappingResponse) o;
return created == that.created;
}
@Override
public int hashCode() {
return Objects.hash(created);
}
private static final ConstructingObjectParser<PutRoleMappingResponse, Void> PARSER = new ConstructingObjectParser<>(
"put_role_mapping_response", true, args -> new PutRoleMappingResponse((boolean) args[0]));
static {
PARSER.declareBoolean(constructorArg(), new ParseField("created"));
// To parse the "created" field we also declare a "role_mapping" object field.
// Once the nested "created" field is found, the parser constructs the target object and
// ignores the role_mapping object.
PARSER.declareObject((a,b) -> {}, (parser, context) -> null, new ParseField("role_mapping"));
}
public static PutRoleMappingResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
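
For illustration, a parsing sketch against the flattened shape that the declared boolean consumes directly (per the comment above, the live response nests the flag under role_mapping):

    String json = "{\"created\": true}";
    try (XContentParser parser = XContentType.JSON.xContent()
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        PutRoleMappingResponse response = PutRoleMappingResponse.fromXContent(parser);
        // response.isCreated() == true
    }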

View File

@@ -0,0 +1,133 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
* Simple model of an X.509 certificate
*/
public final class CertificateInfo {
public static final ParseField PATH = new ParseField("path");
public static final ParseField FORMAT = new ParseField("format");
public static final ParseField ALIAS = new ParseField("alias");
public static final ParseField SUBJECT_DN = new ParseField("subject_dn");
public static final ParseField SERIAL_NUMBER = new ParseField("serial_number");
public static final ParseField HAS_PRIVATE_KEY = new ParseField("has_private_key");
public static final ParseField EXPIRY = new ParseField("expiry");
private final String path;
private final String format;
private final String alias;
private final String subjectDn;
private final String serialNumber;
private final boolean hasPrivateKey;
private final String expiry;
public CertificateInfo(String path, String format, @Nullable String alias, String subjectDn, String serialNumber, boolean hasPrivateKey,
String expiry) {
this.path = path;
this.format = format;
this.alias = alias;
this.subjectDn = subjectDn;
this.serialNumber = serialNumber;
this.hasPrivateKey = hasPrivateKey;
this.expiry = expiry;
}
public String getPath() {
return path;
}
public String getFormat() {
return format;
}
public String getAlias() {
return alias;
}
public String getSubjectDn() {
return subjectDn;
}
public String getSerialNumber() {
return serialNumber;
}
public boolean isHasPrivateKey() {
return hasPrivateKey;
}
public String getExpiry() {
return expiry;
}
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<CertificateInfo, Void> PARSER = new ConstructingObjectParser<>("certificate_info",
true, args -> new CertificateInfo((String) args[0], (String) args[1], (String) args[2], (String) args[3], (String) args[4],
(boolean) args[5], (String) args[6]));
static {
PARSER.declareString(constructorArg(), PATH);
PARSER.declareString(constructorArg(), FORMAT);
PARSER.declareStringOrNull(constructorArg(), ALIAS);
PARSER.declareString(constructorArg(), SUBJECT_DN);
PARSER.declareString(constructorArg(), SERIAL_NUMBER);
PARSER.declareBoolean(constructorArg(), HAS_PRIVATE_KEY);
PARSER.declareString(constructorArg(), EXPIRY);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final CertificateInfo that = (CertificateInfo) other;
return this.path.equals(that.path)
&& this.format.equals(that.format)
&& this.hasPrivateKey == that.hasPrivateKey
&& Objects.equals(this.alias, that.alias)
&& this.serialNumber.equals(that.serialNumber)
&& this.subjectDn.equals(that.subjectDn)
&& this.expiry.equals(that.expiry);
}
@Override
public int hashCode() {
return Objects.hash(path, format, alias, subjectDn, serialNumber, hasPrivateKey, expiry);
}
public static CertificateInfo fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
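A usage sketch for the parser above, assuming a flat JSON object keyed by the ParseField names declared in the class; all values below are made up for illustration:
String json = "{\"path\":\"certs/elastic-certificates.p12\",\"format\":\"PKCS12\","
    + "\"alias\":\"instance\",\"subject_dn\":\"CN=instance\",\"serial_number\":\"a20f0ee901\","
    + "\"has_private_key\":false,\"expiry\":\"2021-01-15T20:42:49.000Z\"}";
try (XContentParser parser = XContentType.JSON.xContent().createParser(
        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    CertificateInfo info = CertificateInfo.fromXContent(parser);
    // info.getFormat() is "PKCS12"; info.isHasPrivateKey() is false
}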

View File

@ -57,7 +57,7 @@ public abstract class CompositeRoleMapperExpression implements RoleMapperExpress
}
public String getName() {
- return this.getName();
+ return this.name;
}
public List<RoleMapperExpression> getElements() {

View File

@ -50,7 +50,7 @@ public class FieldRoleMapperExpression implements RoleMapperExpression {
throw new IllegalArgumentException("null or empty field name (" + field + ")");
}
if (values == null || values.length == 0) {
throw new IllegalArgumentException("null or empty values (" + values + ")");
throw new IllegalArgumentException("null or empty values for field (" + field + ")");
}
this.field = field;
this.values = Collections.unmodifiableList(Arrays.asList(values));

View File

@ -0,0 +1,61 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.watcher;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import java.util.Objects;
/**
* A request to explicitly activate a watch.
*/
public final class ActivateWatchRequest implements Validatable {
private final String watchId;
public ActivateWatchRequest(String watchId) {
this.watchId = Objects.requireNonNull(watchId, "Watch identifier is required");
if (PutWatchRequest.isValidId(this.watchId) == false) {
throw new IllegalArgumentException("Watch identifier contains whitespace");
}
}
/**
* @return The ID of the watch to be activated.
*/
public String getWatchId() {
return watchId;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ActivateWatchRequest that = (ActivateWatchRequest) o;
return Objects.equals(watchId, that.watchId);
}
@Override
public int hashCode() {
return Objects.hash(watchId);
}
}
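A usage sketch: the constructor validates its argument eagerly, so an invalid identifier fails at construction time rather than on the wire (identifiers below are illustrative):
ActivateWatchRequest request = new ActivateWatchRequest("my_watch"); // valid id
try {
    new ActivateWatchRequest("my watch"); // whitespace in the id
} catch (IllegalArgumentException e) {
    // "Watch identifier contains whitespace"
}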

View File

@ -0,0 +1,71 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.watcher;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
* Response from an 'activate watch' request.
*/
public final class ActivateWatchResponse {
private static final ParseField STATUS_FIELD = new ParseField("status");
private static final ConstructingObjectParser<ActivateWatchResponse, Void> PARSER =
new ConstructingObjectParser<>("activate_watch_response", true,
a -> new ActivateWatchResponse((WatchStatus) a[0]));
static {
PARSER.declareObject(ConstructingObjectParser.constructorArg(),
(parser, context) -> WatchStatus.parse(parser),
STATUS_FIELD);
}
private final WatchStatus status;
public ActivateWatchResponse(WatchStatus status) {
this.status = status;
}
public WatchStatus getStatus() {
return status;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ActivateWatchResponse that = (ActivateWatchResponse) o;
return Objects.equals(status, that.status);
}
@Override
public int hashCode() {
return Objects.hash(status);
}
public static ActivateWatchResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
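Typical retrieval path, mirroring the WatcherIT test added later in this commit (the watch id is illustrative):
ActivateWatchResponse response = highLevelClient().watcher()
    .activateWatch(new ActivateWatchRequest("my_watch"), RequestOptions.DEFAULT);
boolean active = response.getStatus().state().isActive();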

View File

@ -78,6 +78,16 @@ public class IngestClientIT extends ESRestHighLevelClientTestCase {
assertEquals(expectedConfig.getConfigAsMap(), response.pipelines().get(0).getConfigAsMap());
}
public void testGetNonexistentPipeline() throws IOException {
String id = "nonexistent_pipeline_id";
GetPipelineRequest request = new GetPipelineRequest(id);
GetPipelineResponse response =
execute(request, highLevelClient().ingest()::getPipeline, highLevelClient().ingest()::getPipelineAsync);
assertFalse(response.isFound());
}
public void testDeletePipeline() throws IOException {
String id = "some_pipeline_id";
{

View File

@ -0,0 +1,53 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpPost;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.client.license.StartBasicRequest;
import org.elasticsearch.test.ESTestCase;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.client.RequestConvertersTests.setRandomMasterTimeout;
import static org.elasticsearch.client.RequestConvertersTests.setRandomTimeout;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
public class LicenseRequestConvertersTests extends ESTestCase {
public void testStartBasic() {
final boolean acknowledge = randomBoolean();
StartBasicRequest startBasicRequest = new StartBasicRequest(acknowledge);
Map<String, String> expectedParams = new HashMap<>();
if (acknowledge) {
expectedParams.put("acknowledge", Boolean.TRUE.toString());
}
setRandomTimeout(startBasicRequest, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
setRandomMasterTimeout(startBasicRequest, expectedParams);
Request request = LicenseRequestConverters.startBasic(startBasicRequest);
assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
assertThat(request.getEndpoint(), equalTo("/_xpack/license/start_basic"));
assertThat(request.getParameters(), equalTo(expectedParams));
assertThat(request.getEntity(), is(nullValue()));
}
}

View File

@ -0,0 +1,130 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.elasticsearch.Build;
import org.elasticsearch.client.license.StartBasicRequest;
import org.elasticsearch.client.license.StartBasicResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.protocol.xpack.license.LicensesStatus;
import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
import org.elasticsearch.protocol.xpack.license.PutLicenseResponse;
import org.junit.After;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.empty;
public class LicensingIT extends ESRestHighLevelClientTestCase {
@BeforeClass
public static void checkForSnapshot() {
assumeTrue("Trial license used to rollback is only valid when tested against snapshot/test builds",
Build.CURRENT.isSnapshot());
}
@After
public void rollbackToTrial() throws IOException {
putTrialLicense();
}
public static void putTrialLicense() throws IOException {
assumeTrue("Trial license is only valid when tested against snapshot/test builds",
Build.CURRENT.isSnapshot());
// use a hard-coded trial license valid for 20 years so that we can roll back from other licenses
final String licenseDefinition = Strings.toString(jsonBuilder()
.startObject()
.field("licenses", Arrays.asList(
MapBuilder.<String, Object>newMapBuilder()
.put("uid", "96fc37c6-6fc9-43e2-a40d-73143850cd72")
.put("type", "trial")
// 2018-10-16 07:02:48 UTC
.put("issue_date_in_millis", "1539673368158")
// 2038-10-11 07:02:48 UTC, 20 yrs later
.put("expiry_date_in_millis", "2170393368158")
.put("max_nodes", "5")
.put("issued_to", "client_rest-high-level_integTestCluster")
.put("issuer", "elasticsearch")
.put("start_date_in_millis", "-1")
.put("signature",
"AAAABAAAAA3FXON9kGmNqmH+ASDWAAAAIAo5/x6hrsGh1GqqrJmy4qgmEC7gK0U4zQ6q5ZEMhm4jAAABAAcdKHL0BfM2uqTgT7BDuFxX5lb"
+ "t/bHDVJ421Wwgm5p3IMbw/W13iiAHz0hhDziF7acJbc/y65L+BKGtVC1gSSHeLDHaAD66VrjKxfc7VbGyJIAYBOdujf0rheurmaD3IcNo"
+ "/tWDjCdtTwrNziFkorsGcPadBP5Yc6csk3/Q74DlfiYweMBxLUfkBERwxwd5OQS6ujGvl/4bb8p5zXvOw8vMSaAXSXXnExP6lam+0934W"
+ "0kHvU7IGk+fCUjOaiSWKSoE4TEcAtVNYj/oRoRtfQ1KQGpdCHxTHs1BimdZaG0nBHDsvhYlVVLSvHN6QzqsHWgFDG6JJxhtU872oTRSUHA=")
.immutableMap()))
.endObject());
final PutLicenseRequest request = new PutLicenseRequest();
request.setAcknowledge(true);
request.setLicenseDefinition(licenseDefinition);
final PutLicenseResponse response = highLevelClient().license().putLicense(request, RequestOptions.DEFAULT);
assertThat(response.isAcknowledged(), equalTo(true));
assertThat(response.status(), equalTo(LicensesStatus.VALID));
}
public void testStartBasic() throws Exception {
// we don't test the case where we successfully start a basic license, because the integ test cluster
// generates one on startup and we don't have a good way to prevent or work around that in this test project
// case where we don't acknowledge basic license conditions
{
final StartBasicRequest request = new StartBasicRequest();
final StartBasicResponse response = highLevelClient().license().startBasic(request, RequestOptions.DEFAULT);
assertThat(response.isAcknowledged(), equalTo(false));
assertThat(response.isBasicStarted(), equalTo(false));
assertThat(response.getErrorMessage(), equalTo("Operation failed: Needs acknowledgement."));
assertThat(response.getAcknowledgeMessage(),
containsString("This license update requires acknowledgement. " +
"To acknowledge the license, please read the following messages and call /start_basic again"));
assertNotEmptyAcknowledgeMessages(response);
}
// case where we acknowledge and the basic is started successfully
{
final StartBasicRequest request = new StartBasicRequest(true);
final StartBasicResponse response = highLevelClient().license().startBasic(request, RequestOptions.DEFAULT);
assertThat(response.isAcknowledged(), equalTo(true));
assertThat(response.isBasicStarted(), equalTo(true));
assertThat(response.getErrorMessage(), nullValue());
assertThat(response.getAcknowledgeMessage(), nullValue());
assertThat(response.getAcknowledgeMessages().size(), equalTo(0));
}
}
private static void assertNotEmptyAcknowledgeMessages(StartBasicResponse response) {
assertThat(response.getAcknowledgeMessages().entrySet(), not(empty()));
for (Map.Entry<String, String[]> entry : response.getAcknowledgeMessages().entrySet()) {
assertThat(entry.getKey(), not(isEmptyOrNullString()));
final List<String> messages = Arrays.asList(entry.getValue());
for (String message : messages) {
assertThat(message, not(isEmptyOrNullString()));
}
}
}
}

View File

@ -34,6 +34,7 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobStatsRequest;
@ -41,6 +42,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutJobRequest;
@ -162,11 +164,18 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.deleteJob(deleteJobRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint());
- assertEquals(Boolean.toString(false), request.getParameters().get("force"));
+ assertNull(request.getParameters().get("force"));
assertNull(request.getParameters().get("wait_for_completion"));
deleteJobRequest = new DeleteJobRequest(jobId);
deleteJobRequest.setForce(true);
request = MLRequestConverters.deleteJob(deleteJobRequest);
assertEquals(Boolean.toString(true), request.getParameters().get("force"));
deleteJobRequest = new DeleteJobRequest(jobId);
deleteJobRequest.setWaitForCompletion(false);
request = MLRequestConverters.deleteJob(deleteJobRequest);
assertEquals(Boolean.toString(false), request.getParameters().get("wait_for_completion"));
}
public void testFlushJob() throws Exception {
@ -293,6 +302,30 @@ public class MLRequestConvertersTests extends ESTestCase {
}
}
public void testGetDatafeedStats() {
GetDatafeedStatsRequest getDatafeedStatsRequest = new GetDatafeedStatsRequest();
Request request = MLRequestConverters.getDatafeedStats(getDatafeedStatsRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/datafeeds/_stats", request.getEndpoint());
assertFalse(request.getParameters().containsKey("allow_no_datafeeds"));
getDatafeedStatsRequest = new GetDatafeedStatsRequest("datafeed1", "datafeeds*");
getDatafeedStatsRequest.setAllowNoDatafeeds(true);
request = MLRequestConverters.getDatafeedStats(getDatafeedStatsRequest);
assertEquals("/_xpack/ml/datafeeds/datafeed1,datafeeds*/_stats", request.getEndpoint());
assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_datafeeds"));
}
public void testPreviewDatafeed() {
PreviewDatafeedRequest datafeedRequest = new PreviewDatafeedRequest("datafeed_1");
Request request = MLRequestConverters.previewDatafeed(datafeedRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/datafeeds/" + datafeedRequest.getDatafeedId() + "/_preview", request.getEndpoint());
}
public void testDeleteForecast() {
String jobId = randomAlphaOfLength(10);
DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest(jobId);

View File

@ -33,6 +33,7 @@ import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
@ -41,6 +42,8 @@ import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
@ -49,6 +52,8 @@ import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
@ -63,6 +68,8 @@ import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.calendars.CalendarTests;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedState;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.DataDescription;
import org.elasticsearch.client.ml.job.config.Detector;
@ -76,8 +83,11 @@ import org.elasticsearch.rest.RestStatus;
import org.junit.After;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@ -142,17 +152,33 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
}
- public void testDeleteJob() throws Exception {
+ public void testDeleteJob_GivenWaitForCompletionIsTrue() throws Exception {
String jobId = randomValidJobId();
Job job = buildJob(jobId);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
- AcknowledgedResponse response = execute(new DeleteJobRequest(jobId),
+ DeleteJobResponse response = execute(new DeleteJobRequest(jobId),
machineLearningClient::deleteJob,
machineLearningClient::deleteJobAsync);
- assertTrue(response.isAcknowledged());
+ assertTrue(response.getAcknowledged());
assertNull(response.getTask());
}
public void testDeleteJob_GivenWaitForCompletionIsFalse() throws Exception {
String jobId = randomValidJobId();
Job job = buildJob(jobId);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
DeleteJobRequest deleteJobRequest = new DeleteJobRequest(jobId);
deleteJobRequest.setWaitForCompletion(false);
DeleteJobResponse response = execute(deleteJobRequest, machineLearningClient::deleteJob, machineLearningClient::deleteJobAsync);
assertNull(response.getAcknowledged());
assertNotNull(response.getTask());
}
public void testOpenJob() throws Exception {
@ -564,6 +590,126 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
}
}
public void testGetDatafeedStats() throws Exception {
String jobId1 = "ml-get-datafeed-stats-test-id-1";
String jobId2 = "ml-get-datafeed-stats-test-id-2";
String indexName = "datafeed_stats_data_1";
// Set up the index
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long");
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
// create the job and the datafeed
Job job1 = buildJob(jobId1);
putJob(job1);
openJob(job1);
Job job2 = buildJob(jobId2);
putJob(job2);
String datafeedId1 = createAndPutDatafeed(jobId1, indexName);
String datafeedId2 = createAndPutDatafeed(jobId2, indexName);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
machineLearningClient.startDatafeed(new StartDatafeedRequest(datafeedId1), RequestOptions.DEFAULT);
GetDatafeedStatsRequest request = new GetDatafeedStatsRequest(datafeedId1);
// Test getting stats for a specific datafeed
GetDatafeedStatsResponse response =
execute(request, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync);
assertEquals(1, response.count());
assertThat(response.datafeedStats(), hasSize(1));
assertThat(response.datafeedStats().get(0).getDatafeedId(), equalTo(datafeedId1));
assertThat(response.datafeedStats().get(0).getDatafeedState().toString(), equalTo(DatafeedState.STARTED.toString()));
// Test getting all explicitly
request = GetDatafeedStatsRequest.getAllDatafeedStatsRequest();
response = execute(request, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync);
assertTrue(response.count() >= 2L);
assertTrue(response.datafeedStats().size() >= 2L);
assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()),
hasItems(datafeedId1, datafeedId2));
// Test getting all implicitly
response =
execute(new GetDatafeedStatsRequest(), machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync);
assertTrue(response.count() >= 2L);
assertTrue(response.datafeedStats().size() >= 2L);
assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()),
hasItems(datafeedId1, datafeedId2));
// Test getting all with wildcard
request = new GetDatafeedStatsRequest("ml-get-datafeed-stats-test-id-*");
response = execute(request, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync);
assertEquals(2L, response.count());
assertThat(response.datafeedStats(), hasSize(2));
assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()),
hasItems(datafeedId1, datafeedId2));
// Test when allow_no_datafeeds is false
final GetDatafeedStatsRequest erroredRequest = new GetDatafeedStatsRequest("datafeeds-that-do-not-exist*");
erroredRequest.setAllowNoDatafeeds(false);
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
() -> execute(erroredRequest, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync));
assertThat(exception.status().getStatus(), equalTo(404));
}
public void testPreviewDatafeed() throws Exception {
String jobId = "test-preview-datafeed";
String indexName = "preview_data_1";
// Set up the index and docs
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long");
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
long now = (System.currentTimeMillis() / 1000) * 1000;
long thePast = now - 60000;
int i = 0;
List<Integer> totalTotals = new ArrayList<>(60);
while (thePast < now) {
Integer total = randomInt(1000);
IndexRequest doc = new IndexRequest();
doc.index(indexName);
doc.type("doc");
doc.id("id" + i);
doc.source("{\"total\":" + total + ",\"timestamp\":" + thePast + "}", XContentType.JSON);
bulk.add(doc);
thePast += 1000;
i++;
totalTotals.add(total);
}
highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
// create the job and the datafeed
Job job = buildJob(jobId);
putJob(job);
openJob(job);
String datafeedId = jobId + "-feed";
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId)
.setIndices(indexName)
.setQueryDelay(TimeValue.timeValueSeconds(1))
.setTypes(Collections.singletonList("doc"))
.setFrequency(TimeValue.timeValueSeconds(1)).build();
machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
PreviewDatafeedResponse response = execute(new PreviewDatafeedRequest(datafeedId),
machineLearningClient::previewDatafeed,
machineLearningClient::previewDatafeedAsync);
Integer[] totals = response.getDataList().stream().map(map -> (Integer)map.get("total")).toArray(Integer[]::new);
assertThat(totalTotals, containsInAnyOrder(totals));
}
public void testDeleteForecast() throws Exception {
String jobId = "test-delete-forecast";

View File

@ -1572,6 +1572,12 @@ public class RequestConvertersTests extends ESTestCase {
setRandomLocal(request::local, expectedParams);
}
static void setRandomTimeout(TimedRequest request, TimeValue defaultTimeout, Map<String, String> expectedParams) {
setRandomTimeout(s ->
request.setTimeout(TimeValue.parseTimeValue(s, request.getClass().getName() + ".timeout")),
defaultTimeout, expectedParams);
}
static void setRandomTimeout(Consumer<String> setter, TimeValue defaultTimeout, Map<String, String> expectedParams) {
if (randomBoolean()) {
String timeout = randomTimeValue();
@ -1583,9 +1589,19 @@ public class RequestConvertersTests extends ESTestCase {
}
static void setRandomMasterTimeout(MasterNodeRequest<?> request, Map<String, String> expectedParams) {
setRandomMasterTimeout(request::masterNodeTimeout, expectedParams);
}
static void setRandomMasterTimeout(TimedRequest request, Map<String, String> expectedParams) {
setRandomMasterTimeout(s ->
request.setMasterTimeout(TimeValue.parseTimeValue(s, request.getClass().getName() + ".masterNodeTimeout")),
expectedParams);
}
static void setRandomMasterTimeout(Consumer<String> setter, Map<String, String> expectedParams) {
if (randomBoolean()) {
String masterTimeout = randomTimeValue();
- request.masterNodeTimeout(masterTimeout);
+ setter.accept(masterTimeout);
expectedParams.put("master_timeout", masterTimeout);
} else {
expectedParams.put("master_timeout", MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT.getStringRep());

View File

@ -721,10 +721,16 @@ public class RestHighLevelClientTests extends ESTestCase {
methods.containsKey(apiName.substring(0, apiName.length() - 6)));
assertThat(method.getReturnType(), equalTo(Void.TYPE));
assertEquals(0, method.getExceptionTypes().length);
- assertEquals(3, method.getParameterTypes().length);
- assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request"));
- assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class));
- assertThat(method.getParameterTypes()[2], equalTo(ActionListener.class));
+ if (apiName.equals("security.get_ssl_certificates_async")) {
+ assertEquals(2, method.getParameterTypes().length);
+ assertThat(method.getParameterTypes()[0], equalTo(RequestOptions.class));
+ assertThat(method.getParameterTypes()[1], equalTo(ActionListener.class));
+ } else {
+ assertEquals(3, method.getParameterTypes().length);
+ assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request"));
+ assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class));
+ assertThat(method.getParameterTypes()[2], equalTo(ActionListener.class));
+ }
} else {
//A few methods return a boolean rather than a response object
if (apiName.equals("ping") || apiName.contains("exist")) {
@ -735,7 +741,7 @@ public class RestHighLevelClientTests extends ESTestCase {
assertEquals(1, method.getExceptionTypes().length);
//a few methods don't accept a request object as argument
if (apiName.equals("ping") || apiName.equals("info")) {
if (apiName.equals("ping") || apiName.equals("info") || apiName.equals("security.get_ssl_certificates")) {
assertEquals(1, method.getParameterTypes().length);
assertThat(method.getParameterTypes()[0], equalTo(RequestOptions.class));
} else {

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.client;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
@ -31,6 +32,8 @@ import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.GetRollupJobResponse;
import org.elasticsearch.client.rollup.GetRollupJobResponse.IndexerState;
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
import org.elasticsearch.client.rollup.DeleteRollupJobResponse;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
@ -46,6 +49,7 @@ import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder;
import org.junit.Before;
import java.util.Arrays;
import java.util.Collections;
@ -60,19 +64,35 @@ import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
public class RollupIT extends ESRestHighLevelClientTestCase {
+ double sum = 0.0d;
+ int max = Integer.MIN_VALUE;
+ int min = Integer.MAX_VALUE;
private static final List<String> SUPPORTED_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);
- @SuppressWarnings("unchecked")
- public void testPutAndGetRollupJob() throws Exception {
- double sum = 0.0d;
- int max = Integer.MIN_VALUE;
- int min = Integer.MAX_VALUE;
private String id;
private String indexPattern;
private String rollupIndex;
private String cron;
private int pageSize;
private int numDocs;
@Before
public void init() throws Exception {
id = randomAlphaOfLength(10);
indexPattern = randomFrom("docs", "d*", "doc*");
rollupIndex = randomFrom("rollup", "test");
cron = "*/1 * * * * ?";
numDocs = indexDocs();
pageSize = randomIntBetween(numDocs, numDocs * 10);
}
public int indexDocs() throws Exception {
final BulkRequest bulkRequest = new BulkRequest();
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int minute = 0; minute < 60; minute++) {
@ -112,12 +132,33 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest("docs"), RequestOptions.DEFAULT);
assertEquals(0, refreshResponse.getFailedShards());
return numDocs;
}
- final String id = randomAlphaOfLength(10);
- final String indexPattern = randomFrom("docs", "d*", "doc*");
- final String rollupIndex = randomFrom("rollup", "test");
- final String cron = "*/1 * * * * ?";
- final int pageSize = randomIntBetween(numDocs, numDocs * 10);
public void testDeleteRollupJob() throws Exception {
final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY));
final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));
PutRollupJobRequest putRollupJobRequest =
new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));
final RollupClient rollupClient = highLevelClient().rollup();
PutRollupJobResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
DeleteRollupJobRequest deleteRollupJobRequest = new DeleteRollupJobRequest(id);
DeleteRollupJobResponse deleteRollupJobResponse = highLevelClient().rollup()
.deleteRollupJob(deleteRollupJobRequest, RequestOptions.DEFAULT);
assertTrue(deleteRollupJobResponse.isAcknowledged());
}
public void testDeleteMissingRollupJob() {
DeleteRollupJobRequest deleteRollupJobRequest = new DeleteRollupJobRequest(randomAlphaOfLength(10));
ElasticsearchStatusException responseException = expectThrows(ElasticsearchStatusException.class, () -> highLevelClient().rollup()
.deleteRollupJob(deleteRollupJobRequest, RequestOptions.DEFAULT));
assertThat(responseException.status().getStatus(), is(404));
}
@SuppressWarnings("unchecked")
public void testPutAndGetRollupJob() throws Exception {
// TODO expand this to also test with histogram and terms?
final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY));
final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
@ -134,9 +175,6 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
Response startResponse = client().performRequest(new Request("POST", "/_xpack/rollup/job/" + id + "/_start"));
assertEquals(RestStatus.OK.getStatus(), startResponse.getHttpResponse().getStatusLine().getStatusCode());
- int finalMin = min;
- int finalMax = max;
- double finalSum = sum;
assertBusy(() -> {
SearchResponse searchResponse = highLevelClient().search(new SearchRequest(rollupIndex), RequestOptions.DEFAULT);
assertEquals(0, searchResponse.getFailedShards());
@ -154,13 +192,13 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
for (String name : metric.getMetrics()) {
Number value = (Number) source.get(metric.getField() + "." + name + ".value");
if ("min".equals(name)) {
- assertEquals(finalMin, value.intValue());
+ assertEquals(min, value.intValue());
} else if ("max".equals(name)) {
- assertEquals(finalMax, value.intValue());
+ assertEquals(max, value.intValue());
} else if ("sum".equals(name)) {
- assertEquals(finalSum, value.doubleValue(), 0.0d);
+ assertEquals(sum, value.doubleValue(), 0.0d);
} else if ("avg".equals(name)) {
- assertEquals(finalSum, value.doubleValue(), 0.0d);
+ assertEquals(sum, value.doubleValue(), 0.0d);
Number avgCount = (Number) source.get(metric.getField() + "." + name + "._count");
assertEquals(numDocs, avgCount.intValue());
} else if ("value_count".equals(name)) {
@ -191,4 +229,5 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
GetRollupJobResponse getResponse = execute(getRollupJobRequest, rollupClient::getRollupJob, rollupClient::getRollupJobAsync);
assertThat(getResponse.getJobs(), empty());
}
}

View File

@ -24,8 +24,12 @@ import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.ChangePasswordRequest;
import org.elasticsearch.client.security.PutRoleMappingRequest;
import org.elasticsearch.client.security.PutUserRequest;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -67,6 +71,34 @@ public class SecurityRequestConvertersTests extends ESTestCase {
assertToXContentBody(putUserRequest, request.getEntity());
}
public void testPutRoleMapping() throws IOException {
final String username = randomAlphaOfLengthBetween(4, 7);
final String rolename = randomAlphaOfLengthBetween(4, 7);
final String roleMappingName = randomAlphaOfLengthBetween(4, 7);
final String groupname = "cn="+randomAlphaOfLengthBetween(4, 7)+",dc=example,dc=com";
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
final Map<String, String> expectedParams;
if (refreshPolicy != RefreshPolicy.NONE) {
expectedParams = Collections.singletonMap("refresh", refreshPolicy.getValue());
} else {
expectedParams = Collections.emptyMap();
}
final RoleMapperExpression rules = AnyRoleMapperExpression.builder()
.addExpression(FieldRoleMapperExpression.ofUsername(username))
.addExpression(FieldRoleMapperExpression.ofGroups(groupname))
.build();
final PutRoleMappingRequest putRoleMappingRequest = new PutRoleMappingRequest(roleMappingName, true, Collections.singletonList(
rolename), rules, null, refreshPolicy);
final Request request = SecurityRequestConverters.putRoleMapping(putRoleMappingRequest);
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/security/role_mapping/" + roleMappingName, request.getEndpoint());
assertEquals(expectedParams, request.getParameters());
assertToXContentBody(putRoleMappingRequest, request.getEntity());
}
public void testEnableUser() {
final String username = randomAlphaOfLengthBetween(1, 12);
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());

View File

@ -19,6 +19,8 @@
package org.elasticsearch.client;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchResponse;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.client.watcher.AckWatchResponse;
import org.elasticsearch.client.watcher.ActionStatus;
@ -33,6 +35,7 @@ import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
import org.elasticsearch.rest.RestStatus;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
public class WatcherIT extends ESRestHighLevelClientTestCase {
@ -108,4 +111,26 @@ public class WatcherIT extends ESRestHighLevelClientTestCase {
new AckWatchRequest("nonexistent"), RequestOptions.DEFAULT));
assertEquals(RestStatus.NOT_FOUND, exception.status());
}
public void testActivateWatchThatExists() throws Exception {
String watchId = randomAlphaOfLength(10);
createWatch(watchId);
ActivateWatchResponse activateWatchResponse1 = highLevelClient().watcher().activateWatch(new ActivateWatchRequest(watchId),
RequestOptions.DEFAULT);
assertThat(activateWatchResponse1.getStatus().state().isActive(), is(true));
ActivateWatchResponse activateWatchResponse2 = highLevelClient().watcher().activateWatch(new ActivateWatchRequest(watchId),
RequestOptions.DEFAULT);
assertThat(activateWatchResponse2.getStatus().state().isActive(), is(true));
assertThat(activateWatchResponse1.getStatus().state().getTimestamp(),
lessThan(activateWatchResponse2.getStatus().state().getTimestamp()));
}
public void testActivateWatchThatDoesNotExist() throws Exception {
String watchId = randomAlphaOfLength(10);
// expect an exception when activating a watch that does not exist
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () ->
highLevelClient().watcher().activateWatch(new ActivateWatchRequest(watchId), RequestOptions.DEFAULT));
assertEquals(RestStatus.NOT_FOUND, exception.status());
}
}
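The tests above call a createWatch helper that this hunk does not show. A rough sketch of what such a helper could look like, assuming the PutWatchRequest API used elsewhere in this commit; the watch definition itself is illustrative:
private void createWatch(String watchId) throws Exception {
    // trigger every 10 seconds, read a trivial input, log the payload
    String json = "{\"trigger\":{\"schedule\":{\"interval\":\"10s\"}},"
        + "\"input\":{\"simple\":{\"foo\":\"bar\"}},"
        + "\"actions\":{\"logme\":{\"logging\":{\"text\":\"{{ctx.payload}}\"}}}}";
    PutWatchRequest putWatchRequest = new PutWatchRequest(watchId, new BytesArray(json), XContentType.JSON);
    highLevelClient().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT);
}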

View File

@ -21,6 +21,7 @@ package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
@ -97,4 +98,14 @@ public class WatcherRequestConvertersTests extends ESTestCase {
assertEquals(expectedEndpoint.toString(), request.getEndpoint());
assertThat(request.getEntity(), nullValue());
}
public void testActivateWatchRequestConversion() {
String watchId = randomAlphaOfLength(10);
ActivateWatchRequest activateWatchRequest = new ActivateWatchRequest(watchId);
Request request = WatcherRequestConverters.activateWatch(activateWatchRequest);
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/watcher/watch/" + watchId + "/_activate", request.getEndpoint());
assertThat(request.getEntity(), nullValue());
}
}

View File

@ -176,7 +176,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// <3>
}
if (shardInfo.getFailed() > 0) {
- for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
+ for (ReplicationResponse.ShardInfo.Failure failure :
+ shardInfo.getFailures()) {
String reason = failure.reason(); // <4>
}
}
@ -239,8 +240,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
{
IndexRequest request = new IndexRequest("posts", "doc", "async").source("field", "value");
+ ActionListener<IndexResponse> listener;
// tag::index-execute-listener
- ActionListener<IndexResponse> listener = new ActionListener<IndexResponse>() {
+ listener = new ActionListener<IndexResponse>() {
@Override
public void onResponse(IndexResponse indexResponse) {
// <1>
@ -305,8 +307,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
request = new UpdateRequest("posts", "doc", "1").fetchSource(true);
//tag::update-request-with-stored-script
- Script stored =
- new Script(ScriptType.STORED, null, "increment-field", parameters); // <1>
+ Script stored = new Script(
+ ScriptType.STORED, null, "increment-field", parameters); // <1>
request.script(stored); // <2>
//end::update-request-with-stored-script
updateResponse = client.update(request, RequestOptions.DEFAULT);
@ -359,7 +361,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
//end::update-request-with-doc-as-string
request.fetchSource(true);
// tag::update-execute
- UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
+ UpdateResponse updateResponse = client.update(
+ request, RequestOptions.DEFAULT);
// end::update-execute
assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
@ -397,7 +400,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// <1>
}
if (shardInfo.getFailed() > 0) {
- for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
+ for (ReplicationResponse.ShardInfo.Failure failure :
+ shardInfo.getFailures()) {
String reason = failure.reason(); // <2>
}
}
@ -408,7 +412,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
UpdateRequest request = new UpdateRequest("posts", "type", "does_not_exist")
.doc("field", "value");
try {
- UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
+ UpdateResponse updateResponse = client.update(
+ request, RequestOptions.DEFAULT);
} catch (ElasticsearchException e) {
if (e.status() == RestStatus.NOT_FOUND) {
// <1>
@ -422,7 +427,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
.doc("field", "value")
.version(1);
try {
- UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
+ UpdateResponse updateResponse = client.update(
+ request, RequestOptions.DEFAULT);
} catch(ElasticsearchException e) {
if (e.status() == RestStatus.CONFLICT) {
// <1>
@ -445,7 +451,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
//tag::update-request-source-include
String[] includes = new String[]{"updated", "r*"};
String[] excludes = Strings.EMPTY_ARRAY;
- request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1>
+ request.fetchSource(
+ new FetchSourceContext(true, includes, excludes)); // <1>
//end::update-request-source-include
UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
@ -459,7 +466,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
//tag::update-request-source-exclude
String[] includes = Strings.EMPTY_ARRAY;
String[] excludes = new String[]{"updated"};
- request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1>
+ request.fetchSource(
+ new FetchSourceContext(true, includes, excludes)); // <1>
//end::update-request-source-exclude
UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
@ -508,8 +516,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
{
UpdateRequest request = new UpdateRequest("posts", "doc", "async").doc("reason", "async update").docAsUpsert(true);
+ ActionListener<UpdateResponse> listener;
// tag::update-execute-listener
- ActionListener<UpdateResponse> listener = new ActionListener<UpdateResponse>() {
+ listener = new ActionListener<UpdateResponse>() {
@Override
public void onResponse(UpdateResponse updateResponse) {
// <1>
@ -548,12 +557,13 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::delete-request
DeleteRequest request = new DeleteRequest(
"posts", // <1>
"doc", // <2>
"1"); // <3>
"doc", // <2>
"1"); // <3>
// end::delete-request
// tag::delete-execute
- DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
+ DeleteResponse deleteResponse = client.delete(
+ request, RequestOptions.DEFAULT);
// end::delete-execute
assertSame(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
@ -567,7 +577,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// <1>
}
if (shardInfo.getFailed() > 0) {
- for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
+ for (ReplicationResponse.ShardInfo.Failure failure :
+ shardInfo.getFailures()) {
String reason = failure.reason(); // <2>
}
}
@ -598,7 +609,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
{
// tag::delete-notfound
DeleteRequest request = new DeleteRequest("posts", "doc", "does_not_exist");
- DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
+ DeleteResponse deleteResponse = client.delete(
+ request, RequestOptions.DEFAULT);
if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) {
// <1>
}
@ -612,8 +624,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::delete-conflict
try {
DeleteRequest request = new DeleteRequest("posts", "doc", "1").version(2);
- DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
+ DeleteResponse deleteResponse = client.delete(
+ new DeleteRequest("posts", "doc", "1").version(2),
+ RequestOptions.DEFAULT);
} catch (ElasticsearchException exception) {
if (exception.status() == RestStatus.CONFLICT) {
// <1>
@ -628,8 +641,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
DeleteRequest request = new DeleteRequest("posts", "doc", "async");
+ ActionListener<DeleteResponse> listener;
// tag::delete-execute-listener
- ActionListener<DeleteResponse> listener = new ActionListener<DeleteResponse>() {
+ listener = new ActionListener<DeleteResponse>() {
@Override
public void onResponse(DeleteResponse deleteResponse) {
// <1>

View File

@ -26,6 +26,8 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.license.StartBasicRequest;
import org.elasticsearch.client.license.StartBasicResponse;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
@ -33,11 +35,15 @@ import org.elasticsearch.protocol.xpack.license.GetLicenseResponse;
import org.elasticsearch.protocol.xpack.license.LicensesStatus;
import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
import org.elasticsearch.protocol.xpack.license.PutLicenseResponse;
import org.junit.After;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.client.LicensingIT.putTrialLicense;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.hasSize;
@ -50,8 +56,18 @@ import static org.hamcrest.Matchers.startsWith;
*/
public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase {
@BeforeClass
public static void checkForSnapshot() {
assumeTrue("Trial license used to rollback is only valid when tested against snapshot/test builds",
Build.CURRENT.isSnapshot());
}
@After
public void rollbackToTrial() throws IOException {
putTrialLicense();
}
public void testLicense() throws Exception {
assumeTrue("License is only valid when tested against snapshot/test builds", Build.CURRENT.isSnapshot());
RestHighLevelClient client = highLevelClient();
String license = "{\"license\": {\"uid\":\"893361dc-9749-4997-93cb-802e3d7fa4a8\",\"type\":\"gold\"," +
"\"issue_date_in_millis\":1411948800000,\"expiry_date_in_millis\":1914278399999,\"max_nodes\":1,\"issued_to\":\"issued_to\"," +
@ -215,4 +231,50 @@ public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase {
assertThat(currentLicense, endsWith("}"));
}
}
public void testPostStartBasic() throws Exception {
RestHighLevelClient client = highLevelClient();
{
//tag::start-basic-execute
StartBasicRequest request = new StartBasicRequest();
StartBasicResponse response = client.license().startBasic(request, RequestOptions.DEFAULT);
//end::start-basic-execute
//tag::start-basic-response
boolean acknowledged = response.isAcknowledged(); // <1>
boolean basicStarted = response.isBasicStarted(); // <2>
String errorMessage = response.getErrorMessage(); // <3>
String acknowledgeMessage = response.getAcknowledgeMessage(); // <4>
Map<String, String[]> acknowledgeMessages = response.getAcknowledgeMessages(); // <5>
//end::start-basic-response
}
{
StartBasicRequest request = new StartBasicRequest();
// tag::start-basic-listener
ActionListener<StartBasicResponse> listener = new ActionListener<StartBasicResponse>() {
@Override
public void onResponse(StartBasicResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::start-basic-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::start-basic-execute-async
client.license().startBasicAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::start-basic-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
}

View File

@ -36,6 +36,8 @@ import org.elasticsearch.client.rollup.GetRollupJobResponse;
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats;
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus;
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
import org.elasticsearch.client.rollup.DeleteRollupJobResponse;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
@ -280,4 +282,53 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
}
});
}
public void testDeleteRollupJob() throws Exception {
RestHighLevelClient client = highLevelClient();
String id = "job_2";
// tag::rollup-delete-job-request
DeleteRollupJobRequest request = new DeleteRollupJobRequest(id); // <1>
// end::rollup-delete-job-request
try {
// tag::rollup-delete-job-execute
DeleteRollupJobResponse response = client.rollup().deleteRollupJob(request, RequestOptions.DEFAULT);
// end::rollup-delete-job-execute
// tag::rollup-delete-job-response
response.isAcknowledged(); // <1>
// end::rollup-delete-job-response
} catch (Exception e) {
// Swallow any exception; this test does not actually test cancellation.
}
// tag::rollup-delete-job-execute-listener
ActionListener<DeleteRollupJobResponse> listener = new ActionListener<DeleteRollupJobResponse>() {
@Override
public void onResponse(DeleteRollupJobResponse response) {
boolean acknowledged = response.isAcknowledged(); // <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::rollup-delete-job-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::rollup-delete-job-execute-async
client.rollup().deleteRollupJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::rollup-delete-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}

View File

@ -26,13 +26,23 @@ import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.security.ChangePasswordRequest;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EmptyResponse;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.GetSslCertificatesResponse;
import org.elasticsearch.client.security.PutRoleMappingRequest;
import org.elasticsearch.client.security.PutRoleMappingResponse;
import org.elasticsearch.client.security.PutUserRequest;
import org.elasticsearch.client.security.PutUserResponse;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.client.security.support.CertificateInfo;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
import org.hamcrest.Matchers;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@ -86,6 +96,58 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
public void testPutRoleMapping() throws Exception {
final RestHighLevelClient client = highLevelClient();
{
// tag::put-role-mapping-execute
final RoleMapperExpression rules = AnyRoleMapperExpression.builder()
.addExpression(FieldRoleMapperExpression.ofUsername("*"))
.addExpression(FieldRoleMapperExpression.ofGroups("cn=admins,dc=example,dc=com"))
.build();
final PutRoleMappingRequest request = new PutRoleMappingRequest("mapping-example", true, Collections.singletonList("superuser"),
rules, null, RefreshPolicy.NONE);
final PutRoleMappingResponse response = client.security().putRoleMapping(request, RequestOptions.DEFAULT);
// end::put-role-mapping-execute
// tag::put-role-mapping-response
boolean isCreated = response.isCreated(); // <1>
// end::put-role-mapping-response
assertTrue(isCreated);
}
{
final RoleMapperExpression rules = AnyRoleMapperExpression.builder()
.addExpression(FieldRoleMapperExpression.ofUsername("*"))
.addExpression(FieldRoleMapperExpression.ofGroups("cn=admins,dc=example,dc=com"))
.build();
final PutRoleMappingRequest request = new PutRoleMappingRequest("mapping-example", true, Collections.singletonList("superuser"),
rules, null, RefreshPolicy.NONE);
// tag::put-role-mapping-execute-listener
ActionListener<PutRoleMappingResponse> listener = new ActionListener<PutRoleMappingResponse>() {
@Override
public void onResponse(PutRoleMappingResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-role-mapping-execute-listener
// Replace the empty listener with a blocking listener so the test can wait for the response
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-role-mapping-execute-async
client.security().putRoleMappingAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::put-role-mapping-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testEnableUser() throws Exception {
RestHighLevelClient client = highLevelClient();
char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};
@ -175,6 +237,87 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
public void testGetSslCertificates() throws Exception {
RestHighLevelClient client = highLevelClient();
{
//tag::get-certificates-execute
GetSslCertificatesResponse response = client.security().getSslCertificates(RequestOptions.DEFAULT);
//end::get-certificates-execute
assertNotNull(response);
//tag::get-certificates-response
List<CertificateInfo> certificates = response.getCertificates(); // <1>
//end::get-certificates-response
assertThat(certificates.size(), Matchers.equalTo(9));
final Iterator<CertificateInfo> it = certificates.iterator();
CertificateInfo c = it.next();
assertThat(c.getSubjectDn(), Matchers.equalTo("CN=testnode-client-profile"));
assertThat(c.getPath(), Matchers.equalTo("testnode.jks"));
assertThat(c.getFormat(), Matchers.equalTo("jks"));
c = it.next();
assertThat(c.getSubjectDn(), Matchers.equalTo("CN=Elasticsearch Test Node, OU=elasticsearch, O=org"));
assertThat(c.getPath(), Matchers.equalTo("testnode.crt"));
assertThat(c.getFormat(), Matchers.equalTo("PEM"));
c = it.next();
assertThat(c.getSubjectDn(), Matchers.equalTo("CN=OpenLDAP, OU=Elasticsearch, O=Elastic, L=Mountain View, ST=CA, C=US"));
assertThat(c.getPath(), Matchers.equalTo("testnode.jks"));
assertThat(c.getFormat(), Matchers.equalTo("jks"));
c = it.next();
assertThat(c.getSubjectDn(), Matchers.equalTo("CN=Elasticsearch Test Node, OU=elasticsearch, O=org"));
assertThat(c.getPath(), Matchers.equalTo("testnode.jks"));
assertThat(c.getFormat(), Matchers.equalTo("jks"));
c = it.next();
assertThat(c.getSubjectDn(), Matchers.equalTo("CN=Elasticsearch Test Client, OU=elasticsearch, O=org"));
assertThat(c.getPath(), Matchers.equalTo("testnode.jks"));
assertThat(c.getFormat(), Matchers.equalTo("jks"));
c = it.next();
assertThat(c.getSubjectDn(), Matchers.equalTo("CN=ad-ELASTICSEARCHAD-CA, DC=ad, DC=test, DC=elasticsearch, DC=com"));
assertThat(c.getPath(), Matchers.equalTo("testnode.jks"));
assertThat(c.getFormat(), Matchers.equalTo("jks"));
c = it.next();
assertThat(c.getSubjectDn(), Matchers.equalTo("CN=Elasticsearch Test Node"));
assertThat(c.getPath(), Matchers.equalTo("testnode.jks"));
assertThat(c.getFormat(), Matchers.equalTo("jks"));
c = it.next();
assertThat(c.getSubjectDn(), Matchers.equalTo("CN=samba4"));
assertThat(c.getPath(), Matchers.equalTo("testnode.jks"));
assertThat(c.getFormat(), Matchers.equalTo("jks"));
c = it.next();
assertThat(c.getSubjectDn(), Matchers.equalTo("CN=Elasticsearch Test Node"));
assertThat(c.getPath(), Matchers.equalTo("testnode.jks"));
assertThat(c.getFormat(), Matchers.equalTo("jks"));
}
{
// tag::get-certificates-execute-listener
ActionListener<GetSslCertificatesResponse> listener = new ActionListener<GetSslCertificatesResponse>() {
@Override
public void onResponse(GetSslCertificatesResponse getSslCertificatesResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-certificates-execute-listener
// Replace the empty listener with a blocking listener so the test can wait for the response
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-certificates-execute-async
client.security().getSslCertificatesAsync(RequestOptions.DEFAULT, listener); // <1>
// end::get-certificates-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testChangePassword() throws Exception {
RestHighLevelClient client = highLevelClient();
char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};

View File

@ -25,6 +25,8 @@ import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchResponse;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.client.watcher.AckWatchResponse;
import org.elasticsearch.client.watcher.ActionStatus;
@ -160,9 +162,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
}
{
//tag::ack-watch-request
AckWatchRequest request = new AckWatchRequest("my_watch_id", // <1>
"logme", "emailme"); // <2>
//end::ack-watch-request
//tag::ack-watch-execute
AckWatchResponse response = client.watcher().ackWatch(request, RequestOptions.DEFAULT);
//end::ack-watch-execute
@ -203,4 +208,60 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
public void testActivateWatch() throws Exception {
RestHighLevelClient client = highLevelClient();
{
BytesReference watch = new BytesArray("{ \n" +
" \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
" \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" +
" \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
"}");
PutWatchRequest request = new PutWatchRequest("my_watch_id", watch, XContentType.JSON);
request.setActive(false); // <1>
PutWatchResponse response = client.watcher().putWatch(request, RequestOptions.DEFAULT);
}
{
//tag::activate-watch-request
ActivateWatchRequest request = new ActivateWatchRequest("my_watch_id");
ActivateWatchResponse response = client.watcher().activateWatch(request, RequestOptions.DEFAULT);
//end::activate-watch-request
//tag::activate-watch-response
WatchStatus watchStatus = response.getStatus(); // <1>
//end::activate-watch-response
assertTrue(watchStatus.state().isActive());
}
{
ActivateWatchRequest request = new ActivateWatchRequest("my_watch_id");
//tag::activate-watch-request-listener
ActionListener<ActivateWatchResponse> listener = new ActionListener<ActivateWatchResponse>() {
@Override
public void onResponse(ActivateWatchResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
//end::activate-watch-request-listener
//Replace the empty listener with a blocking listener so the test can wait for the response
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
//tag::activate-watch-request-async
client.watcher().activateWatchAsync(request, RequestOptions.DEFAULT, listener); // <1>
//end::activate-watch-request-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
}

View File

@ -0,0 +1,103 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.license;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.protocol.xpack.common.ProtocolUtils;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.CoreMatchers.equalTo;
public class StartBasicResponseTests extends ESTestCase {
public void testFromXContent() throws Exception {
StartBasicResponse.Status status = randomFrom(StartBasicResponse.Status.values());
boolean acknowledged = status != StartBasicResponse.Status.NEED_ACKNOWLEDGEMENT;
String acknowledgeMessage = null;
Map<String, String[]> ackMessages = Collections.emptyMap();
if (status != StartBasicResponse.Status.GENERATED_BASIC) {
acknowledgeMessage = randomAlphaOfLength(10);
ackMessages = randomAckMessages();
}
final StartBasicResponse startBasicResponse = new StartBasicResponse(status, ackMessages, acknowledgeMessage);
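// Round-trip: serialize the response to a randomly chosen XContent type, parse it back, and check every field survives.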
XContentType xContentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
toXContent(startBasicResponse, builder);
final StartBasicResponse response = StartBasicResponse.fromXContent(createParser(builder));
assertThat(response.isAcknowledged(), equalTo(acknowledged));
assertThat(response.isBasicStarted(), equalTo(status.isBasicStarted()));
assertThat(response.getAcknowledgeMessage(), equalTo(acknowledgeMessage));
assertThat(ProtocolUtils.equals(response.getAcknowledgeMessages(), ackMessages), equalTo(true));
}
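// Hand-rolled serializer mirroring the server's wire format (the client-side response class is, presumably, parse-only).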
private static void toXContent(StartBasicResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
builder.field("acknowledged", response.isAcknowledged());
if (response.isBasicStarted()) {
builder.field("basic_was_started", true);
} else {
builder.field("basic_was_started", false);
builder.field("error_message", response.getErrorMessage());
}
if (response.getAcknowledgeMessages().isEmpty() == false) {
builder.startObject("acknowledge");
builder.field("message", response.getAcknowledgeMessage());
for (Map.Entry<String, String[]> entry : response.getAcknowledgeMessages().entrySet()) {
builder.startArray(entry.getKey());
for (String message : entry.getValue()) {
builder.value(message);
}
builder.endArray();
}
builder.endObject();
}
builder.endObject();
}
private static Map<String, String[]> randomAckMessages() {
int nFeatures = randomIntBetween(1, 5);
Map<String, String[]> ackMessages = new HashMap<>();
for (int i = 0; i < nFeatures; i++) {
String feature = randomAlphaOfLengthBetween(9, 15);
int nMessages = randomIntBetween(1, 5);
String[] messages = new String[nMessages];
for (int j = 0; j < nMessages; j++) {
messages[j] = randomAlphaOfLengthBetween(10, 30);
}
ackMessages.put(feature, messages);
}
return ackMessages;
}
}

View File

@ -34,12 +34,4 @@ public class DeleteJobRequestTests extends ESTestCase {
ex = expectThrows(NullPointerException.class, () -> createTestInstance().setJobId(null));
assertEquals("[job_id] must not be null", ex.getMessage());
}
public void test_WithForce() {
DeleteJobRequest deleteJobRequest = createTestInstance();
assertFalse(deleteJobRequest.isForce());
deleteJobRequest.setForce(true);
assertTrue(deleteJobRequest.isForce());
}
}

View File

@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
public class DeleteJobResponseTests extends AbstractXContentTestCase<DeleteJobResponse> {
@Override
protected DeleteJobResponse createTestInstance() {
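// A delete-job response carries either an acknowledged flag or a task id (for deletions that return a task), never both.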
if (randomBoolean()) {
return new DeleteJobResponse(randomBoolean(), null);
}
return new DeleteJobResponse(null, new TaskId(randomAlphaOfLength(20) + ":" + randomIntBetween(1, 100)));
}
@Override
protected DeleteJobResponse doParseInstance(XContentParser parser) throws IOException {
return DeleteJobResponse.PARSER.apply(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

View File

@ -0,0 +1,69 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class GetDatafeedStatsRequestTests extends AbstractXContentTestCase<GetDatafeedStatsRequest> {
public void testAllDatafeedsRequest() {
GetDatafeedStatsRequest request = GetDatafeedStatsRequest.getAllDatafeedStatsRequest();
assertEquals(request.getDatafeedIds().size(), 1);
assertEquals(request.getDatafeedIds().get(0), "_all");
}
public void testNewWithDatafeedId() {
Exception exception = expectThrows(NullPointerException.class, () -> new GetDatafeedStatsRequest("datafeed", null));
assertEquals(exception.getMessage(), "datafeedIds must not contain null values");
}
@Override
protected GetDatafeedStatsRequest createTestInstance() {
int datafeedCount = randomIntBetween(0, 10);
List<String> datafeedIds = new ArrayList<>(datafeedCount);
for (int i = 0; i < datafeedCount; i++) {
datafeedIds.add(randomAlphaOfLength(10));
}
GetDatafeedStatsRequest request = new GetDatafeedStatsRequest(datafeedIds);
if (randomBoolean()) {
request.setAllowNoDatafeeds(randomBoolean());
}
return request;
}
@Override
protected GetDatafeedStatsRequest doParseInstance(XContentParser parser) throws IOException {
return GetDatafeedStatsRequest.PARSER.parse(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
}

View File

@ -0,0 +1,59 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
import org.elasticsearch.client.ml.datafeed.DatafeedStatsTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
public class GetDatafeedStatsResponseTests extends AbstractXContentTestCase<GetDatafeedStatsResponse> {
@Override
protected GetDatafeedStatsResponse createTestInstance() {
int count = randomIntBetween(1, 5);
List<DatafeedStats> results = new ArrayList<>(count);
for(int i = 0; i < count; i++) {
results.add(DatafeedStatsTests.createRandomInstance());
}
return new GetDatafeedStatsResponse(results, count);
}
@Override
protected GetDatafeedStatsResponse doParseInstance(XContentParser parser) throws IOException {
return GetDatafeedStatsResponse.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
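// Only allow random fields at the root: the nested stats objects likely would not tolerate injected extras.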
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
return field -> !field.isEmpty();
}
}

View File

@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
public class PreviewDatafeedRequestTests extends AbstractXContentTestCase<PreviewDatafeedRequest> {
@Override
protected PreviewDatafeedRequest createTestInstance() {
return new PreviewDatafeedRequest(DatafeedConfigTests.randomValidDatafeedId());
}
@Override
protected PreviewDatafeedRequest doParseInstance(XContentParser parser) throws IOException {
return PreviewDatafeedRequest.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

View File

@ -0,0 +1,99 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.stream.Collectors;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.containsInAnyOrder;
public class PreviewDatafeedResponseTests extends ESTestCase {
protected PreviewDatafeedResponse createTestInstance() throws IOException {
//This just creates a random object to stand in for random data
DatafeedConfig datafeedConfig = DatafeedConfigTests.createRandom();
BytesReference bytes = XContentHelper.toXContent(datafeedConfig, XContentType.JSON, false);
return new PreviewDatafeedResponse(bytes);
}
public void testGetDataList() throws IOException {
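// The preview API hands back raw JSON; getDataList should expose both a top-level array and a single object as a list of maps.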
String rawData = "[\n" +
" {\n" +
" \"time\": 1454803200000,\n" +
" \"airline\": \"JZA\",\n" +
" \"doc_count\": 5,\n" +
" \"responsetime\": 990.4628295898438\n" +
" },\n" +
" {\n" +
" \"time\": 1454803200000,\n" +
" \"airline\": \"JBU\",\n" +
" \"doc_count\": 23,\n" +
" \"responsetime\": 877.5927124023438\n" +
" },\n" +
" {\n" +
" \"time\": 1454803200000,\n" +
" \"airline\": \"KLM\",\n" +
" \"doc_count\": 42,\n" +
" \"responsetime\": 1355.481201171875\n" +
" }\n" +
"]";
BytesReference bytes = new BytesArray(rawData);
PreviewDatafeedResponse response = new PreviewDatafeedResponse(bytes);
assertThat(response.getDataList()
.stream()
.map(map -> (String)map.get("airline"))
.collect(Collectors.toList()), containsInAnyOrder("JZA", "JBU", "KLM"));
rawData = "{\"key\":\"my_value\"}";
bytes = new BytesArray(rawData);
response = new PreviewDatafeedResponse(bytes);
assertThat(response.getDataList()
.stream()
.map(map -> (String)map.get("key"))
.collect(Collectors.toList()), containsInAnyOrder("my_value"));
}
//Because this is a raw BytesReference, the field shuffling done by `AbstractXContentTestCase` would cause spurious equality failures
public void testSerializationDeserialization() throws IOException {
for (int runs = 0; runs < 20; runs++) {
XContentType xContentType = XContentType.JSON;
PreviewDatafeedResponse testInstance = createTestInstance();
BytesReference originalXContent = XContentHelper.toXContent(testInstance, xContentType, false);
XContentParser parser = this.createParser(xContentType.xContent(), originalXContent);
PreviewDatafeedResponse parsed = PreviewDatafeedResponse.fromXContent(parser);
assertEquals(testInstance, parsed);
assertToXContentEquivalent(
XContentHelper.toXContent(testInstance, xContentType, false),
XContentHelper.toXContent(parsed, xContentType, false),
xContentType);
}
}
}

View File

@ -0,0 +1,75 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.datafeed;
import org.elasticsearch.client.ml.NodeAttributes;
import org.elasticsearch.client.ml.NodeAttributesTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;
public class DatafeedStatsTests extends AbstractXContentTestCase<DatafeedStats> {
public static DatafeedStats createRandomInstance() {
String datafeedId = DatafeedConfigTests.randomValidDatafeedId();
DatafeedState datafeedState =
randomFrom(DatafeedState.STARTED, DatafeedState.STARTING, DatafeedState.STOPPED, DatafeedState.STOPPING);
NodeAttributes nodeAttributes = null;
if (randomBoolean()) {
NodeAttributes randomAttributes = NodeAttributesTests.createRandom();
int numberOfAttributes = randomIntBetween(1, 10);
Map<String, String> attributes = new HashMap<>(numberOfAttributes);
for(int i = 0; i < numberOfAttributes; i++) {
String val = randomAlphaOfLength(10);
attributes.put("ml.key-"+i, val);
}
nodeAttributes = new NodeAttributes(randomAttributes.getId(),
randomAttributes.getName(),
randomAttributes.getEphemeralId(),
randomAttributes.getTransportAddress(),
attributes);
}
String assignmentReason = randomBoolean() ? randomAlphaOfLength(10) : null;
return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentReason);
}
@Override
protected DatafeedStats createTestInstance() {
return createRandomInstance();
}
@Override
protected DatafeedStats doParseInstance(XContentParser parser) throws IOException {
return DatafeedStats.PARSER.apply(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
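// Keep random fields out of node.attributes: it is parsed as a plain string map, so injected extras would change equality.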
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
return field -> field.equals("node.attributes");
}
}

View File

@ -34,9 +34,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class JobTests extends AbstractXContentTestCase<Job> {
@ -77,93 +75,6 @@ public class JobTests extends AbstractXContentTestCase<Job> {
assertNotNull(Job.PARSER.apply(parser, null).build());
}
public void testEquals_GivenDifferentClass() {
Job job = buildJobBuilder("foo").build();
assertFalse(job.equals("a string"));
}
public void testEquals_GivenDifferentIds() {
Date createTime = new Date();
Job.Builder builder = buildJobBuilder("foo");
builder.setCreateTime(createTime);
Job job1 = builder.build();
builder.setId("bar");
Job job2 = builder.build();
assertFalse(job1.equals(job2));
}
public void testEquals_GivenDifferentRenormalizationWindowDays() {
Date date = new Date();
Job.Builder jobDetails1 = new Job.Builder("foo");
jobDetails1.setDataDescription(new DataDescription.Builder());
jobDetails1.setAnalysisConfig(createAnalysisConfig());
jobDetails1.setRenormalizationWindowDays(3L);
jobDetails1.setCreateTime(date);
Job.Builder jobDetails2 = new Job.Builder("foo");
jobDetails2.setDataDescription(new DataDescription.Builder());
jobDetails2.setRenormalizationWindowDays(4L);
jobDetails2.setAnalysisConfig(createAnalysisConfig());
jobDetails2.setCreateTime(date);
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
}
public void testEquals_GivenDifferentBackgroundPersistInterval() {
Date date = new Date();
Job.Builder jobDetails1 = new Job.Builder("foo");
jobDetails1.setDataDescription(new DataDescription.Builder());
jobDetails1.setAnalysisConfig(createAnalysisConfig());
jobDetails1.setBackgroundPersistInterval(TimeValue.timeValueSeconds(10000L));
jobDetails1.setCreateTime(date);
Job.Builder jobDetails2 = new Job.Builder("foo");
jobDetails2.setDataDescription(new DataDescription.Builder());
jobDetails2.setBackgroundPersistInterval(TimeValue.timeValueSeconds(8000L));
jobDetails2.setAnalysisConfig(createAnalysisConfig());
jobDetails2.setCreateTime(date);
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
}
public void testEquals_GivenDifferentModelSnapshotRetentionDays() {
Date date = new Date();
Job.Builder jobDetails1 = new Job.Builder("foo");
jobDetails1.setDataDescription(new DataDescription.Builder());
jobDetails1.setAnalysisConfig(createAnalysisConfig());
jobDetails1.setModelSnapshotRetentionDays(10L);
jobDetails1.setCreateTime(date);
Job.Builder jobDetails2 = new Job.Builder("foo");
jobDetails2.setDataDescription(new DataDescription.Builder());
jobDetails2.setModelSnapshotRetentionDays(8L);
jobDetails2.setAnalysisConfig(createAnalysisConfig());
jobDetails2.setCreateTime(date);
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
}
public void testEquals_GivenDifferentResultsRetentionDays() {
Date date = new Date();
Job.Builder jobDetails1 = new Job.Builder("foo");
jobDetails1.setDataDescription(new DataDescription.Builder());
jobDetails1.setAnalysisConfig(createAnalysisConfig());
jobDetails1.setCreateTime(date);
jobDetails1.setResultsRetentionDays(30L);
Job.Builder jobDetails2 = new Job.Builder("foo");
jobDetails2.setDataDescription(new DataDescription.Builder());
jobDetails2.setResultsRetentionDays(4L);
jobDetails2.setAnalysisConfig(createAnalysisConfig());
jobDetails2.setCreateTime(date);
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
}
public void testEquals_GivenDifferentCustomSettings() {
Job.Builder jobDetails1 = buildJobBuilder("foo");
Map<String, Object> customSettings1 = new HashMap<>();
customSettings1.put("key1", "value1");
jobDetails1.setCustomSettings(customSettings1);
Job.Builder jobDetails2 = buildJobBuilder("foo");
Map<String, Object> customSettings2 = new HashMap<>();
customSettings2.put("key2", "value2");
jobDetails2.setCustomSettings(customSettings2);
assertFalse(jobDetails1.build().equals(jobDetails2.build()));
}
public void testCopyConstructor() {
for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) {
Job job = createTestInstance();
@ -184,20 +95,6 @@ public class JobTests extends AbstractXContentTestCase<Job> {
assertEquals("[job_type] must not be null", ex.getMessage());
}
public static Job.Builder buildJobBuilder(String id, Date date) {
Job.Builder builder = new Job.Builder(id);
builder.setCreateTime(date);
AnalysisConfig.Builder ac = createAnalysisConfig();
DataDescription.Builder dc = new DataDescription.Builder();
builder.setAnalysisConfig(ac);
builder.setDataDescription(dc);
return builder;
}
public static Job.Builder buildJobBuilder(String id) {
return buildJobBuilder(id, new Date());
}
public static String randomValidJobId() {
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
return generator.ofCodePointsLength(random(), 10, 10);
@ -228,9 +125,6 @@ public class JobTests extends AbstractXContentTestCase<Job> {
if (randomBoolean()) {
builder.setFinishedTime(new Date(randomNonNegativeLong()));
}
if (randomBoolean()) {
builder.setLastDataTime(new Date(randomNonNegativeLong()));
}
if (randomBoolean()) {
builder.setEstablishedModelMemory(randomNonNegativeLong());
}
@ -265,6 +159,9 @@ public class JobTests extends AbstractXContentTestCase<Job> {
if (randomBoolean()) {
builder.setResultsIndexName(randomValidJobId());
}
if (randomBoolean()) {
builder.setDeleting(randomBoolean());
}
return builder;
}

View File

@ -38,6 +38,9 @@ public class AnomalyRecordTests extends AbstractXContentTestCase<AnomalyRecord>
anomalyRecord.setActual(Collections.singletonList(randomDouble()));
anomalyRecord.setTypical(Collections.singletonList(randomDouble()));
anomalyRecord.setProbability(randomDouble());
if (randomBoolean()) {
anomalyRecord.setMultiBucketImpact(randomDouble());
}
anomalyRecord.setRecordScore(randomDouble());
anomalyRecord.setInitialRecordScore(randomDouble());
anomalyRecord.setInterim(randomBoolean());

View File

@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.rollup;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;
import java.io.IOException;
public class DeleteRollupJobRequestTests extends AbstractXContentTestCase<DeleteRollupJobRequest> {
private String jobId;
@Before
public void setUpJobId() {
jobId = randomAlphaOfLengthBetween(1, 10);
}
@Override
protected DeleteRollupJobRequest createTestInstance() {
return new DeleteRollupJobRequest(jobId);
}
@Override
protected DeleteRollupJobRequest doParseInstance(final XContentParser parser) throws IOException {
return DeleteRollupJobRequest.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
public void testRequireConfiguration() {
final NullPointerException e = expectThrows(NullPointerException.class, ()-> new DeleteRollupJobRequest(null));
assertEquals("id parameter must not be null", e.getMessage());
}
}

View File

@ -0,0 +1,51 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.rollup;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;
import java.io.IOException;
public class DeleteRollupJobResponseTests extends AbstractXContentTestCase<DeleteRollupJobResponse> {
private boolean acknowledged;
@Before
public void setUpAcknowledged() {
acknowledged = randomBoolean();
}
@Override
protected DeleteRollupJobResponse createTestInstance() {
return new DeleteRollupJobResponse(acknowledged);
}
@Override
protected DeleteRollupJobResponse doParseInstance(XContentParser parser) throws IOException {
return DeleteRollupJobResponse.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
}

View File

@ -0,0 +1,177 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.equalTo;
public class PutRoleMappingRequestTests extends ESTestCase {
public void testPutRoleMappingRequest() {
final String name = randomAlphaOfLength(5);
final boolean enabled = randomBoolean();
final List<String> roles = Collections.singletonList("superuser");
final RoleMapperExpression rules = FieldRoleMapperExpression.ofUsername("user");
final Map<String, Object> metadata = new HashMap<>();
metadata.put("k1", "v1");
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
PutRoleMappingRequest putRoleMappingRequest = new PutRoleMappingRequest(name, enabled, roles, rules, metadata, refreshPolicy);
assertNotNull(putRoleMappingRequest);
assertThat(putRoleMappingRequest.getName(), equalTo(name));
assertThat(putRoleMappingRequest.isEnabled(), equalTo(enabled));
assertThat(putRoleMappingRequest.getRefreshPolicy(), equalTo((refreshPolicy == null) ? RefreshPolicy.getDefault() : refreshPolicy));
assertThat(putRoleMappingRequest.getRules(), equalTo(rules));
assertThat(putRoleMappingRequest.getRoles(), equalTo(roles));
assertThat(putRoleMappingRequest.getMetadata(), equalTo((metadata == null) ? Collections.emptyMap() : metadata));
}
public void testPutRoleMappingRequestThrowsExceptionForNullOrEmptyName() {
final String name = randomBoolean() ? null : "";
final boolean enabled = randomBoolean();
final List<String> roles = Collections.singletonList("superuser");
final RoleMapperExpression rules = FieldRoleMapperExpression.ofUsername("user");
final Map<String, Object> metadata = new HashMap<>();
metadata.put("k1", "v1");
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> new PutRoleMappingRequest(name, enabled,
roles, rules, metadata, refreshPolicy));
assertThat(ile.getMessage(), equalTo("role-mapping name is missing"));
}
public void testPutRoleMappingRequestThrowsExceptionForNullOrEmptyRoles() {
final String name = randomAlphaOfLength(5);
final boolean enabled = randomBoolean();
final List<String> roles = randomBoolean() ? null : Collections.emptyList();
final RoleMapperExpression rules = FieldRoleMapperExpression.ofUsername("user");
final Map<String, Object> metadata = new HashMap<>();
metadata.put("k1", "v1");
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> new PutRoleMappingRequest(name, enabled,
roles, rules, metadata, refreshPolicy));
assertThat(ile.getMessage(), equalTo("role-mapping roles are missing"));
}
public void testPutRoleMappingRequestThrowsExceptionForNullRules() {
final String name = randomAlphaOfLength(5);
final boolean enabled = randomBoolean();
final List<String> roles = Collections.singletonList("superuser");
final RoleMapperExpression rules = null;
final Map<String, Object> metadata = new HashMap<>();
metadata.put("k1", "v1");
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
expectThrows(NullPointerException.class, () -> new PutRoleMappingRequest(name, enabled, roles, rules, metadata, refreshPolicy));
}
public void testPutRoleMappingRequestToXContent() throws IOException {
final String name = randomAlphaOfLength(5);
final boolean enabled = randomBoolean();
final List<String> roles = Collections.singletonList("superuser");
final RoleMapperExpression rules = FieldRoleMapperExpression.ofUsername("user");
final Map<String, Object> metadata = new HashMap<>();
metadata.put("k1", "v1");
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
final PutRoleMappingRequest putRoleMappingRequest = new PutRoleMappingRequest(name, enabled, roles, rules, metadata, refreshPolicy);
final XContentBuilder builder = XContentFactory.jsonBuilder();
putRoleMappingRequest.toXContent(builder, ToXContent.EMPTY_PARAMS);
final String output = Strings.toString(builder);
final String expected =
"{"+
"\"enabled\":" + enabled + "," +
"\"roles\":[\"superuser\"]," +
"\"rules\":{" +
"\"field\":{\"username\":[\"user\"]}" +
"}," +
"\"metadata\":{\"k1\":\"v1\"}" +
"}";
assertThat(output, equalTo(expected));
}
public void testEqualsHashCode() {
final String name = randomAlphaOfLength(5);
final boolean enabled = randomBoolean();
final List<String> roles = Collections.singletonList("superuser");
final RoleMapperExpression rules = FieldRoleMapperExpression.ofUsername("user");
final Map<String, Object> metadata = new HashMap<>();
metadata.put("k1", "v1");
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
PutRoleMappingRequest putRoleMappingRequest = new PutRoleMappingRequest(name, enabled, roles, rules, metadata, refreshPolicy);
assertNotNull(putRoleMappingRequest);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(putRoleMappingRequest, (original) -> {
return new PutRoleMappingRequest(original.getName(), original.isEnabled(), original.getRoles(), original.getRules(), original
.getMetadata(), original.getRefreshPolicy());
});
EqualsHashCodeTestUtils.checkEqualsAndHashCode(putRoleMappingRequest, (original) -> {
return new PutRoleMappingRequest(original.getName(), original.isEnabled(), original.getRoles(), original.getRules(), original
.getMetadata(), original.getRefreshPolicy());
}, PutRoleMappingRequestTests::mutateTestItem);
}
private static PutRoleMappingRequest mutateTestItem(PutRoleMappingRequest original) {
switch (randomIntBetween(0, 4)) {
case 0:
return new PutRoleMappingRequest(randomAlphaOfLength(5), original.isEnabled(), original.getRoles(), original.getRules(),
original.getMetadata(), original.getRefreshPolicy());
case 1:
return new PutRoleMappingRequest(original.getName(), !original.isEnabled(), original.getRoles(), original.getRules(),
original.getMetadata(), original.getRefreshPolicy());
case 2:
return new PutRoleMappingRequest(original.getName(), original.isEnabled(), original.getRoles(),
FieldRoleMapperExpression.ofGroups("group"), original.getMetadata(), original.getRefreshPolicy());
case 3:
return new PutRoleMappingRequest(original.getName(), original.isEnabled(), original.getRoles(), original.getRules(),
Collections.emptyMap(), original.getRefreshPolicy());
case 4:
List<RefreshPolicy> values = Arrays.stream(RefreshPolicy.values())
.filter(rp -> rp != original.getRefreshPolicy())
.collect(Collectors.toList());
return new PutRoleMappingRequest(original.getName(), original.isEnabled(), original.getRoles(), original.getRules(), original
.getMetadata(), randomFrom(values));
default:
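// unreachable: randomIntBetween(0, 4) covers every case above, but the compiler still needs a return path here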
return new PutRoleMappingRequest(randomAlphaOfLength(5), original.isEnabled(), original.getRoles(), original.getRules(),
original.getMetadata(), original.getRefreshPolicy());
}
}
}

View File

@ -86,7 +86,7 @@ public class RoleMapperExpressionDslTests extends ESTestCase {
"]"+
"}";
assertThat(output, equalTo(expected));
}
public void testFieldRoleMapperExpressionThrowsExceptionForMissingMetadataPrefix() {

View File

@ -0,0 +1,113 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.watcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.XContentTestUtils;
import java.io.IOException;
import java.util.function.Predicate;
/**
* Basic unit tests for {@link ActivateWatchResponse}.
*
* Note that we only sanity check watch status parsing here, as there
* are dedicated tests for it in {@link WatchStatusTests}.
*/
public class ActivateWatchResponseTests extends ESTestCase {
public void testBasicParsing() throws IOException {
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
.startObject("status")
.field("version", 42)
.field("execution_state", ExecutionState.ACKNOWLEDGED)
.startObject("state")
.field("active", false)
.endObject()
.endObject()
.endObject();
BytesReference bytes = BytesReference.bytes(builder);
ActivateWatchResponse response = parse(builder.contentType(), bytes);
WatchStatus status = response.getStatus();
assertNotNull(status);
assertEquals(42, status.version());
assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState());
assertFalse(status.state().isActive());
}
public void testParsingWithMissingStatus() throws IOException {
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject().endObject();
BytesReference bytes = BytesReference.bytes(builder);
expectThrows(IllegalArgumentException.class, () -> parse(builder.contentType(), bytes));
}
public void testParsingWithNullStatus() throws IOException {
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
.nullField("status")
.endObject();
BytesReference bytes = BytesReference.bytes(builder);
expectThrows(XContentParseException.class, () -> parse(builder.contentType(), bytes));
}
public void testParsingWithUnknownKeys() throws IOException {
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
.startObject("status")
.field("version", 42)
.field("execution_state", ExecutionState.ACKNOWLEDGED)
.startObject("state")
.field("active", true)
.endObject()
.endObject()
.endObject();
BytesReference bytes = BytesReference.bytes(builder);
Predicate<String> excludeFilter = field -> field.equals("status.actions");
BytesReference bytesWithRandomFields = XContentTestUtils.insertRandomFields(
builder.contentType(), bytes, excludeFilter, random());
ActivateWatchResponse response = parse(builder.contentType(), bytesWithRandomFields);
WatchStatus status = response.getStatus();
assertNotNull(status);
assertEquals(42, status.version());
assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState());
assertTrue(status.state().isActive());
}
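// Positions the parser on its first token before delegating to the response parser.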
private ActivateWatchResponse parse(XContentType contentType, BytesReference bytes) throws IOException {
XContentParser parser = XContentFactory.xContent(contentType)
.createParser(NamedXContentRegistry.EMPTY, null, bytes.streamInput());
parser.nextToken();
return ActivateWatchResponse.fromXContent(parser);
}
}

View File

@ -0,0 +1,23 @@
-----BEGIN CERTIFICATE-----
MIID0zCCArugAwIBAgIJALi5bDfjMszLMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV
BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp
Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTUwOTIzMTg1MjU3WhcNMTkwOTIyMTg1MjU3
WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV
BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC
AQ8AMIIBCgKCAQEA3rGZ1QbsW0+MuyrSLmMfDFKtLBkIFW8V0gRuurFg1PUKKNR1
Mq2tMVwjjYETAU/UY0iKZOzjgvYPKhDTYBTte/WHR1ZK4CYVv7TQX/gtFQG/ge/c
7u0sLch9p7fbd+/HZiLS/rBEZDIohvgUvzvnA8+OIYnw4kuxKo/5iboAIS41klMg
/lATm8V71LMY68inht71/ZkQoAHKgcR9z4yNYvQ1WqKG8DG8KROXltll3sTrKbl5
zJhn660es/1ZnR6nvwt6xnSTl/mNHMjkfv1bs4rJ/py3qPxicdoSIn/KyojUcgHV
F38fuAy2CQTdjVG5fWj9iz+mQvLm3+qsIYQdFwIDAQABo4G/MIG8MAkGA1UdEwQC
MAAwHQYDVR0OBBYEFEMMWLWQi/g83PzlHYqAVnty5L7HMIGPBgNVHREEgYcwgYSC
CWxvY2FsaG9zdIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghds
b2NhbGhvc3Q0LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5s
b2NhbGRvbWFpbjaHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcNAQEL
BQADggEBAMjGGXT8Nt1tbl2GkiKtmiuGE2Ej66YuZ37WSJViaRNDVHLlg87TCcHe
k2rdO+6sFqQbbzEfwQ05T7xGmVu7tm54HwKMRugoQ3wct0bQC5wEWYN+oMDvSyO6
M28mZwWb4VtR2IRyWP+ve5DHwTM9mxWa6rBlGzsQqH6YkJpZojzqk/mQTug+Y8aE
mVoqRIPMHq9ob+S9qd5lp09+MtYpwPfTPx/NN+xMEooXWW/ARfpGhWPkg/FuCu4z
1tFmCqHgNcWirzMm3dQpF78muE9ng6OB2MXQwL4VgnVkxmlZNHbkR2v/t8MyZJxC
y4g6cTMM3S/UMt5/+aIB2JAuMKyuD+A=
-----END CERTIFICATE-----

Binary file not shown.

View File

@ -124,9 +124,9 @@ public class Response {
final Matcher matcher = WARNING_HEADER_PATTERN.matcher(warning);
if (matcher.matches()) {
warnings.add(matcher.group(1)); // well-formed Warning header: surface only the quoted message
} else {
warnings.add(warning); // malformed header: fall back to the raw value
}
}
return warnings;
}

View File

@ -59,6 +59,7 @@ import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@ -70,10 +71,12 @@ import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
import static org.elasticsearch.client.RestClientTestUtil.getOkStatusCodes;
import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode;
import static org.elasticsearch.client.SyncResponseListenerTests.assertExceptionStackContainsCallingMethod;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@ -96,6 +99,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
private Node node;
private CloseableHttpAsyncClient httpClient;
private HostsTrackingFailureListener failureListener;
private boolean strictDeprecationMode;
@Before
@SuppressWarnings("unchecked")
@ -147,8 +151,9 @@ public class RestClientSingleHostTests extends RestClientTestCase {
defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default");
node = new Node(new HttpHost("localhost", 9200));
failureListener = new HostsTrackingFailureListener();
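// Randomize strict deprecation handling so the tests in this class exercise both modes.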
strictDeprecationMode = randomBoolean();
restClient = new RestClient(httpClient, 10000, defaultHeaders,
singletonList(node), null, failureListener, NodeSelector.ANY, strictDeprecationMode);
}
/**
@ -331,9 +336,54 @@ public class RestClientSingleHostTests extends RestClientTestCase {
}
assertThat(esResponse.getStatusLine().getStatusCode(), equalTo(statusCode));
assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), Collections.<String>emptySet());
assertFalse(esResponse.hasWarnings());
}
}
public void testDeprecationWarnings() throws IOException {
String chars = randomAsciiAlphanumOfLength(5);
assertDeprecationWarnings(singletonList("poorly formatted " + chars), singletonList("poorly formatted " + chars));
assertDeprecationWarnings(singletonList(formatWarning(chars)), singletonList(chars));
assertDeprecationWarnings(
Arrays.asList(formatWarning(chars), "another one", "and another"),
Arrays.asList(chars, "another one", "and another"));
}
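// Round-trips the given Warning headers through the mocked host and checks how the client surfaces them:
// attached to the Response in lenient mode, thrown as a ResponseException in strict mode.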
private void assertDeprecationWarnings(List<String> warningHeaderTexts, List<String> warningBodyTexts) throws IOException {
String method = randomFrom(getHttpMethods());
Request request = new Request(method, "/200");
RequestOptions.Builder options = request.getOptions().toBuilder();
for (String warningHeaderText : warningHeaderTexts) {
options.addHeader("Warning", warningHeaderText);
}
request.setOptions(options);
Response response;
if (strictDeprecationMode) {
try {
restClient.performRequest(request);
fail("expected ResponseException because strict deprecation mode is enabled");
return;
} catch (ResponseException e) {
assertThat(e.getMessage(), containsString("\nWarnings: " + warningBodyTexts));
response = e.getResponse();
}
} else {
response = restClient.performRequest(request);
}
assertTrue(response.hasWarnings());
assertEquals(warningBodyTexts, response.getWarnings());
}
/**
* Emulates Elasticsearch's DeprecationLogger.formatWarning in simple
* cases. We don't have the real implementation available because these tests run against Java 1.7.
*/
private static String formatWarning(String warningBody) {
return "299 Elasticsearch-1.2.2-SNAPSHOT-eeeeeee \"" + warningBody + "\" \"Mon, 01 Jan 2001 00:00:00 GMT\"";
}
private HttpUriRequest performRandomRequest(String method) throws Exception {
String uriAsString = "/" + randomStatusCode(getRandom());
Request request = new Request(method, uriAsString);

View File

@ -18,6 +18,17 @@
*/
integTestRunner {
/*
* There are two unique things going on here:
* 1. These tests can be run against an external cluster with
* -Dtests.rest.cluster=whatever and -Dtest.cluster=whatever
* 2. *One* of these tests is incompatible with that and should be skipped
* when running against an external cluster.
*/
if (System.getProperty("tests.rest.cluster") == null) {
systemProperty 'tests.logfile',
"${ -> integTest.nodes[0].homeDir}/logs/${ -> integTest.nodes[0].clusterName }.log"
} else {
systemProperty 'tests.logfile', '--external--'
}
}

View File

@ -40,6 +40,8 @@ public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase {
@Override
protected BufferedReader openReader(Path logFile) {
assumeFalse("Skipping test because it is being run against an external cluster.",
logFile.getFileName().toString().equals("--external--"));
return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> {
try {
return Files.newBufferedReader(logFile, StandardCharsets.UTF_8);

View File

@ -149,27 +149,39 @@ subprojects {
task buildBwcVersion(type: Exec) {
dependsOn checkoutBwcBranch, writeBuildMetadata
workingDir = checkoutDir
doFirst {
// Resolved at execution time, once the branch checkout is available
List<String> lines = file("$checkoutDir/.ci/java-versions.properties").readLines()
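// Joining with "!!" makes Integer.parseInt fail loudly if the properties file unexpectedly yields more than one matching line.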
environment(
'JAVA_HOME',
getJavaHome(it, Integer.parseInt(
lines
.findAll({ it.startsWith("ES_BUILD_JAVA=java") })
.collect({ it.replace("ES_BUILD_JAVA=java", "").trim() })
.join("!!")
))
)
environment(
'RUNTIME_JAVA_HOME',
getJavaHome(it, Integer.parseInt(
lines
.findAll({ it.startsWith("ES_RUNTIME_JAVA=java") })
.collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() })
.join("!!")
))
)
}
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
executable 'cmd'
args '/C', 'call', new File(checkoutDir, 'gradlew').toString()
} else {
executable new File(checkoutDir, 'gradlew').toString()
}
if (gradle.startParameter.isOffline()) {
args "--offline"
}
for (String dir : projectDirs) {
args ":${dir.replace('/', ':')}:assemble"
}
@ -237,4 +249,4 @@ class IndentingOutputStream extends OutputStream {
}
}
}
}

View File

@ -228,7 +228,7 @@ final class JvmOptionsParser {
// no range is present, apply the JVM option to the specified major version only
upper = lower;
} else if (end == null) {
// a range of the form \\d+- is present, apply the JVM option to all major versions larger than the specified one
upper = Integer.MAX_VALUE;
} else {
// a range of the form \\d+-\\d+ is present, apply the JVM option to the specified range of major versions

Some files were not shown because too many files have changed in this diff