diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ba3e3c1175b..1f3166c326f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -161,7 +161,7 @@ Please follow these formatting guidelines: * Line width is 140 characters * The rest is left to Java coding standards * Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do. -* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. Please attempt to tame your IDE so it doesn't make them and please send a PR against this document with instructions for your IDE if it doesn't contain them. +* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. This can be done automatically by your IDE: * Eclipse: `Preferences->Java->Code Style->Organize Imports`. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value. * IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`. There are two configuration options: `Class count to use import with '*'` and `Names count to use static import with '*'`. Set their values to 99999 or some other absurdly high value. * Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so. @@ -320,7 +320,7 @@ have to test Elasticsearch. #### Configurations Gradle organizes dependencies and build artifacts into "configurations" and -allows you to use these configurations arbitrarilly. Here are some of the most +allows you to use these configurations arbitrarily. Here are some of the most common configurations in our build and how we use them:
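(Illustrative aside, not part of the patch: the hunk above introduces Gradle "configurations". Below is a minimal sketch of how dependencies are assigned to configurations in a Gradle build script, using the standard configuration names Gradle provided at the time; the coordinates are placeholders.)

```groovy
dependencies {
    // on the compile and runtime classpaths, and leaks to projects that depend on this one
    compile 'org.example:some-library:1.0'
    // needed to compile, but neither packaged nor present at runtime
    compileOnly 'org.example:annotations:1.0'
    // present at runtime only, invisible while compiling
    runtime 'org.example:driver:1.0'
    // on the compile classpath of the tests only
    testCompile 'junit:junit:4.12'
}
```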
diff --git a/TESTING.asciidoc b/TESTING.asciidoc index d3321f7f8cc..d16d85b2ee1 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -1,4 +1,4 @@ -[[Testing Framework Cheatsheet]] +[[TestingFrameworkCheatsheet]] = Testing [partintro] @@ -250,7 +250,7 @@ Pass arbitrary jvm arguments. Running backwards compatibility tests is disabled by default since it requires a release version of elasticsearch to be present on the test system. -To run backwards compatibilty tests untar or unzip a release and run the tests +To run backwards compatibility tests untar or unzip a release and run the tests with the following command: --------------------------------------------------------------------------- diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index 992462b6fc0..0dc1565e235 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -44,6 +44,8 @@ compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-u // needs to be added separately otherwise Gradle will quote it and javac will fail compileJava.options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"]) +run.executable = new File(project.runtimeJavaHome, 'bin/java') + // classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes forbiddenApisMain.enabled = false diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/fs/AvailableIndexFoldersBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/fs/AvailableIndexFoldersBenchmark.java new file mode 100644 index 00000000000..3b0416c761d --- /dev/null +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/fs/AvailableIndexFoldersBenchmark.java @@ -0,0 +1,94 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.benchmark.fs; + +import org.elasticsearch.common.logging.LogConfigurator; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS) +@Fork(3) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +public class AvailableIndexFoldersBenchmark { + + private NodeEnvironment.NodePath nodePath; + private NodeEnvironment nodeEnv; + private Set<String> excludedDirs; + + @Setup + public void setup() throws IOException { + Path path = Files.createTempDirectory("test"); + String[] paths = new String[] {path.toString()}; + nodePath = new NodeEnvironment.NodePath(path); + + LogConfigurator.setNodeName("test"); + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), path) + .putList(Environment.PATH_DATA_SETTING.getKey(), paths).build(); + nodeEnv = new NodeEnvironment(settings, new Environment(settings, null)); + + Files.createDirectories(nodePath.indicesPath); + excludedDirs = new HashSet<>(); + int numIndices = 5000; + for (int i = 0; i < numIndices; i++) { + String dirName = "dir" + i; + Files.createDirectory(nodePath.indicesPath.resolve(dirName)); + excludedDirs.add(dirName); + } + if (nodeEnv.availableIndexFoldersForPath(nodePath).size() != numIndices) { + throw new IllegalStateException("bad size"); + } + if (nodeEnv.availableIndexFoldersForPath(nodePath, excludedDirs::contains).size() != 0) { + throw new IllegalStateException("bad size"); + } + } + + + @Benchmark + public Set<String> availableIndexFolderNaive() throws IOException { + return nodeEnv.availableIndexFoldersForPath(nodePath); + } + + @Benchmark + public Set<String> availableIndexFolderOptimized() throws IOException { + return nodeEnv.availableIndexFoldersForPath(nodePath, excludedDirs::contains); + } + +} diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy index 2b61165608d..e5500d60093 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy @@ -264,7 +264,11 @@ class RandomizedTestingTask extends DefaultTask { throw new InvalidUserDataException('Seed should be ' + 'set on the project instead of a system property') } - sysproperty key: prop.getKey(), value: prop.getValue().toString() + if (prop.getValue() instanceof Closure) { + sysproperty key: prop.getKey(), value: (prop.getValue() as Closure).call().toString() + } else { + sysproperty key: prop.getKey(), value: prop.getValue().toString() + } } systemProperty 'tests.seed', project.testSeed for
(Map.Entry envvar : environmentVariables) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy index daab0efc8c6..063dcf7d3bb 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy @@ -122,7 +122,7 @@ class VersionCollection { if (isReleased(version) == false) { // caveat 1 - This should only ever contain 2 non released branches in flight. An example is 6.x is frozen, // and 6.2 is cut but not yet released there is some simple logic to make sure that in the case of more than 2, - // it will bail. The order is that the minor snapshot is fufilled first, and then the staged minor snapshot + // it will bail. The order is that the minor snapshot is fulfilled first, and then the staged minor snapshot if (nextMinorSnapshot == null) { // it has not been set yet nextMinorSnapshot = replaceAsSnapshot(version) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy index f2105086f25..6578ce60826 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy @@ -72,7 +72,7 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { /** * Root directory containing all the files generated by this task. It is - * contained withing testRoot. + * contained within testRoot. */ File outputRoot() { return new File(testRoot, '/rest-api-spec/test') @@ -226,10 +226,10 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { } else { current.println('---') current.println("\"line_$test.start\":") - /* The Elasticsearch test runner doesn't support the warnings - * construct unless you output this skip. Since we don't know - * if this snippet will use the warnings construct we emit this - * warning every time. */ + /* The Elasticsearch test runner doesn't support quite a few + * constructs unless we output this skip. We don't know if + * we're going to use these constructs, but we might so we + * output the skip just in case. 
*/ current.println(" - skip:") current.println(" features: ") current.println(" - default_shards") @@ -250,13 +250,13 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { } } } - if (test.skipTest) { + if (test.skip) { if (test.continued) { throw new InvalidUserDataException("Continued snippets " + "can't be skipped") } current.println(" - always_skip") - current.println(" reason: $test.skipTest") + current.println(" reason: $test.skip") } if (test.setup != null) { // Insert a setup defined outside of the docs @@ -274,9 +274,11 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { } private void response(Snippet response) { - current.println(" - match: ") - current.println(" \$body: ") - response.contents.eachLine { current.println(" $it") } + if (null == response.skip) { + current.println(" - match: ") + current.println(" \$body: ") + response.contents.eachLine { current.println(" $it") } + } } void emitDo(String method, String pathAndQuery, String body, diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy index 8c0eedeb6f5..f5decd6db70 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy @@ -122,7 +122,9 @@ public class SnippetsTask extends DefaultTask { + "contain `curl`.") } } - if (snippet.testResponse && snippet.language == 'js') { + if (snippet.testResponse + && 'js' == snippet.language + && null == snippet.skip) { String quoted = snippet.contents // quote values starting with $ .replaceAll(/([:,])\s*(\$[^ ,\n}]+)/, '$1 "$2"') @@ -216,7 +218,7 @@ public class SnippetsTask extends DefaultTask { return } if (it.group(4) != null) { - snippet.skipTest = it.group(4) + snippet.skip = it.group(4) return } if (it.group(5) != null) { @@ -249,7 +251,7 @@ public class SnippetsTask extends DefaultTask { substitutions = [] } String loc = "$file:$lineNumber" - parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$CAT) ?/) { + parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$CAT|$SKIP) ?/) { if (it.group(1) != null) { // TESTRESPONSE[s/adsf/jkl/] substitutions.add([it.group(1), it.group(2)]) @@ -259,6 +261,9 @@ public class SnippetsTask extends DefaultTask { substitutions.add(['\n$', '\\\\s*/']) substitutions.add(['( +)', '$1\\\\s+']) substitutions.add(['\n', '\\\\s*\n ']) + } else if (it.group(4) != null) { + // TESTRESPONSE[skip:reason] + snippet.skip = it.group(4) } } } @@ -312,7 +317,7 @@ public class SnippetsTask extends DefaultTask { boolean test = false boolean testResponse = false boolean testSetup = false - String skipTest = null + String skip = null boolean continued = false String language = null String catchPart = null @@ -337,8 +342,8 @@ public class SnippetsTask extends DefaultTask { if (catchPart) { result += "[catch: $catchPart]" } - if (skipTest) { - result += "[skip=$skipTest]" + if (skip) { + result += "[skip=$skip]" } if (continued) { result += '[continued]' @@ -352,6 +357,9 @@ public class SnippetsTask extends DefaultTask { } if (testResponse) { result += '// TESTRESPONSE' + if (skip) { + result += "[skip=$skip]" + } } if (testSetup) { result += '// TESTSETUP' diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index aaf4e468182..b96bdfae200 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ 
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -337,7 +337,7 @@ class NodeInfo { case 'deb': return new File(baseDir, "${distro}-extracted/etc/elasticsearch") default: - throw new InvalidUserDataException("Unkown distribution: ${distro}") + throw new InvalidUserDataException("Unknown distribution: ${distro}") } } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java index 60de1981f98..7a2504efdd0 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java @@ -43,7 +43,7 @@ public class JdkJarHellCheck { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { String entry = root.relativize(file).toString().replace('\\', '/'); - if (entry.endsWith(".class")) { + if (entry.endsWith(".class") && entry.endsWith("module-info.class") == false) { if (ext.getResource(entry) != null) { detected.add( entry diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java index 7e4766ada65..bffa011cb7b 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java @@ -20,12 +20,12 @@ package org.elasticsearch.gradle.precommit; import org.apache.commons.io.output.NullOutputStream; import org.elasticsearch.gradle.JdkJarHellCheck; -import org.elasticsearch.test.NamingConventionsCheck; import org.gradle.api.DefaultTask; import org.gradle.api.GradleException; import org.gradle.api.JavaVersion; import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.FileCollection; +import org.gradle.api.file.FileTree; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFile; import org.gradle.api.tasks.InputFiles; @@ -47,6 +47,7 @@ import java.util.TreeSet; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.IntStream; public class ThirdPartyAuditTask extends DefaultTask { @@ -171,19 +172,38 @@ public class ThirdPartyAuditTask extends DefaultTask { File jarExpandDir = getJarExpandDir(); // We need to clean up to make sure old dependencies don't linger getProject().delete(jarExpandDir); - jars.forEach(jar -> + + jars.forEach(jar -> { + FileTree jarFiles = getProject().zipTree(jar); getProject().copy(spec -> { + spec.from(jarFiles); + spec.into(jarExpandDir); + // exclude classes from multi release jars + spec.exclude("META-INF/versions/**"); + }); + // Deal with multi release jars: + // The order is important: we iterate here so we don't depend on the order in which Gradle executes the spec. + // We extract multi release jar classes (if these exist) going from 9, the first version to support them, to the + // current `targetCompatibility` version. + // Each extract will overwrite the top level classes that existed before it; the result is that we end up + // with a single version of the class in `jarExpandDir`. + // This will be the closest version to `targetCompatibility`, the same class that would be loaded in a JVM + // that has `targetCompatibility` version. + // This means we only scan classes that would be loaded into `targetCompatibility`, and don't look at any + // other version specific implementation of said classes.
+ IntStream.rangeClosed( + Integer.parseInt(JavaVersion.VERSION_1_9.getMajorVersion()), + Integer.parseInt(targetCompatibility.getMajorVersion()) + ).forEach(majorVersion -> getProject().copy(spec -> { spec.from(getProject().zipTree(jar)); spec.into(jarExpandDir); - // Exclude classes for multi release jars above target - for (int i = Integer.parseInt(targetCompatibility.getMajorVersion()) + 1; - i <= Integer.parseInt(JavaVersion.VERSION_HIGHER.getMajorVersion()); - i++ - ) { - spec.exclude("META-INF/versions/" + i + "/**"); - } - }) - ); + String metaInfPrefix = "META-INF/versions/" + majorVersion; + spec.include(metaInfPrefix + "/**"); + // Drop the version specific prefix + spec.eachFile(details -> details.setPath(details.getPath().replace(metaInfPrefix, ""))); + spec.setIncludeEmptyDirs(false); + })); + }); } private void assertNoJarHell(Set<String> jdkJarHellClasses) { @@ -276,9 +296,9 @@ public class ThirdPartyAuditTask extends DefaultTask { private Set<String> runJdkJarHellCheck() throws IOException { ByteArrayOutputStream standardOut = new ByteArrayOutputStream(); ExecResult execResult = getProject().javaexec(spec -> { - URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation(); + URL location = JdkJarHellCheck.class.getProtectionDomain().getCodeSource().getLocation(); if (location.getProtocol().equals("file") == false) { - throw new GradleException("Unexpected location for NamingConventionCheck class: " + location); + throw new GradleException("Unexpected location for JdkJarHellCheck class: " + location); } try { spec.classpath( diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchConfiguration.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchConfiguration.java similarity index 96% rename from buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchConfiguration.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchConfiguration.java index 913d88e9fa1..a200c75880e 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchConfiguration.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchConfiguration.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.gradle.clusterformation; +package org.elasticsearch.gradle.testclusters; import org.elasticsearch.gradle.Distribution; import org.elasticsearch.gradle.Version; diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java similarity index 98% rename from buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchNode.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 8b78fc2b627..a196cb09e97 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchNode.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License.
*/ -package org.elasticsearch.gradle.clusterformation; +package org.elasticsearch.gradle.testclusters; import org.elasticsearch.GradleServicesAdapter; import org.elasticsearch.gradle.Distribution; diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ClusterformationPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java similarity index 95% rename from buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ClusterformationPlugin.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 779e7b61ed9..3a137906bec 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ClusterformationPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.gradle.clusterformation; +package org.elasticsearch.gradle.testclusters; import groovy.lang.Closure; import org.elasticsearch.GradleServicesAdapter; @@ -37,12 +37,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -public class ClusterformationPlugin implements Plugin<Project> { +public class TestClustersPlugin implements Plugin<Project> { public static final String LIST_TASK_NAME = "listElasticSearchClusters"; public static final String EXTENSION_NAME = "elasticSearchClusters"; - private final Logger logger = Logging.getLogger(ClusterformationPlugin.class); + private final Logger logger = Logging.getLogger(TestClustersPlugin.class); @Override public void apply(Project project) { diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties deleted file mode 100644 index dfd6cd9956a..00000000000 --- a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties +++ /dev/null @@ -1 +0,0 @@ -implementation-class=org.elasticsearch.gradle.clusterformation.ClusterformationPlugin diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.testclusters.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.testclusters.properties new file mode 100644 index 00000000000..8d81f05fc69 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.testclusters.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.testclusters.TestClustersPlugin diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/clusterformation/ClusterformationPluginIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java similarity index 89% rename from buildSrc/src/test/java/org/elasticsearch/gradle/clusterformation/ClusterformationPluginIT.java rename to buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java index c690557537d..021bd9bb151 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/clusterformation/ClusterformationPluginIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License.
*/ -package org.elasticsearch.gradle.clusterformation; +package org.elasticsearch.gradle.testclusters; import org.elasticsearch.gradle.test.GradleIntegrationTestCase; import org.gradle.testkit.runner.BuildResult; @@ -26,11 +26,11 @@ import org.gradle.testkit.runner.TaskOutcome; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; -public class ClusterformationPluginIT extends GradleIntegrationTestCase { +public class TestClustersPluginIT extends GradleIntegrationTestCase { public void testListClusters() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("listElasticSearchClusters", "-s") .withPluginClasspath() .build(); @@ -45,7 +45,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase { public void testUseClusterByOne() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("user1", "-s") .withPluginClasspath() .build(); @@ -60,7 +60,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase { public void testUseClusterByOneWithDryRun() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("user1", "-s", "--dry-run") .withPluginClasspath() .build(); @@ -75,7 +75,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase { public void testUseClusterByTwo() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("user1", "user2", "-s") .withPluginClasspath() .build(); @@ -92,7 +92,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase { public void testUseClusterByUpToDateTask() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("upToDate1", "upToDate2", "-s") .withPluginClasspath() .build(); @@ -109,7 +109,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase { public void testUseClusterBySkippedTask() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("skipped1", "skipped2", "-s") .withPluginClasspath() .build(); @@ -126,7 +126,7 @@ public class ClusterformationPluginIT extends GradleIntegrationTestCase { public void tetUseClusterBySkippedAndWorkingTask() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("skipped1", "user1", "-s") .withPluginClasspath() .build(); diff --git a/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle b/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle index 95d1453025e..c87c097e6be 100644 --- a/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle +++ b/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle @@ -22,7 +22,7 @@ task sample { // dependsOn buildResources.outputDir // for now it's just dependsOn buildResources - // we have to refference it at configuration time in order to be picked up + // we have to reference it at configuration time in order to be picked up ext.checkstyle_suppressions = 
buildResources.copy('checkstyle_suppressions.xml') doLast { println "This task is using ${file(checkstyle_suppressions)}" } } @@ -35,4 +35,4 @@ task noConfigAfterExecution { println "This should cause an error because we are refferencing " + "${buildResources.copy('checkstyle_suppressions.xml')} after the `buildResources` task has ran." } -} \ No newline at end of file +} diff --git a/buildSrc/src/testKit/clusterformation/build.gradle b/buildSrc/src/testKit/testclusters/build.gradle similarity index 94% rename from buildSrc/src/testKit/clusterformation/build.gradle rename to buildSrc/src/testKit/testclusters/build.gradle index ae9dd8a2c33..083ce97b963 100644 --- a/buildSrc/src/testKit/clusterformation/build.gradle +++ b/buildSrc/src/testKit/testclusters/build.gradle @@ -1,5 +1,5 @@ plugins { - id 'elasticsearch.clusterformation' + id 'elasticsearch.testclusters' } elasticSearchClusters { diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 5b611980f1c..5f76b232ecb 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -15,6 +15,8 @@ slf4j = 1.6.2 # when updating the JNA version, also update the version in buildSrc/build.gradle jna = 4.5.1 +netty = 4.1.30.Final + # test dependencies randomizedrunner = 2.7.0 junit = 4.12 diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java index eb070759ed9..035d1fd26fb 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import java.io.IOException; +import java.util.Collections; import static java.util.Collections.emptySet; @@ -83,7 +84,7 @@ public final class IngestClient { */ public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::getPipeline, options, - GetPipelineResponse::fromXContent, emptySet()); + GetPipelineResponse::fromXContent, Collections.singleton(404)); } /** @@ -96,7 +97,7 @@ public final class IngestClient { */ public void getPipelineAsync(GetPipelineRequest request, RequestOptions options, ActionListener<GetPipelineResponse> listener) { restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::getPipeline, options, - GetPipelineResponse::fromXContent, listener, emptySet()); + GetPipelineResponse::fromXContent, listener, Collections.singleton(404)); } /** diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index 1030464be4f..e3570a2a837 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -38,6 +38,7 @@ import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetCalendarsRequest; import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetDatafeedRequest; +import org.elasticsearch.client.ml.GetDatafeedStatsRequest; import org.elasticsearch.client.ml.GetInfluencersRequest; import
org.elasticsearch.client.ml.GetJobRequest; import org.elasticsearch.client.ml.GetJobStatsRequest; @@ -45,6 +46,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest; import org.elasticsearch.client.ml.GetRecordsRequest; import org.elasticsearch.client.ml.OpenJobRequest; import org.elasticsearch.client.ml.PostDataRequest; +import org.elasticsearch.client.ml.PreviewDatafeedRequest; import org.elasticsearch.client.ml.PutCalendarRequest; import org.elasticsearch.client.ml.PutDatafeedRequest; import org.elasticsearch.client.ml.PutJobRequest; @@ -146,7 +148,12 @@ final class MLRequestConverters { Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(request); - params.putParam("force", Boolean.toString(deleteJobRequest.isForce())); + if (deleteJobRequest.getForce() != null) { + params.putParam("force", Boolean.toString(deleteJobRequest.getForce())); + } + if (deleteJobRequest.getWaitForCompletion() != null) { + params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion())); + } return request; } @@ -259,6 +266,34 @@ final class MLRequestConverters { return request; } + static Request getDatafeedStats(GetDatafeedStatsRequest getDatafeedStatsRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("datafeeds") + .addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedStatsRequest.getDatafeedIds())) + .addPathPartAsIs("_stats") + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + if (getDatafeedStatsRequest.isAllowNoDatafeeds() != null) { + params.putParam("allow_no_datafeeds", Boolean.toString(getDatafeedStatsRequest.isAllowNoDatafeeds())); + } + return request; + } + + static Request previewDatafeed(PreviewDatafeedRequest previewDatafeedRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("datafeeds") + .addPathPart(previewDatafeedRequest.getDatafeedId()) + .addPathPartAsIs("_preview") + .build(); + return new Request(HttpGet.METHOD_NAME, endpoint); + } + static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index 43bc18fad0d..8c442d8ffa6 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -26,6 +26,7 @@ import org.elasticsearch.client.ml.DeleteCalendarRequest; import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; +import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; @@ -38,6 +39,8 @@ import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetCategoriesResponse; import org.elasticsearch.client.ml.GetDatafeedRequest; import org.elasticsearch.client.ml.GetDatafeedResponse; +import 
org.elasticsearch.client.ml.GetDatafeedStatsRequest; +import org.elasticsearch.client.ml.GetDatafeedStatsResponse; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetInfluencersResponse; import org.elasticsearch.client.ml.GetJobRequest; @@ -52,6 +55,8 @@ import org.elasticsearch.client.ml.OpenJobRequest; import org.elasticsearch.client.ml.OpenJobResponse; import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PostDataResponse; +import org.elasticsearch.client.ml.PreviewDatafeedRequest; +import org.elasticsearch.client.ml.PreviewDatafeedResponse; import org.elasticsearch.client.ml.PutCalendarRequest; import org.elasticsearch.client.ml.PutCalendarResponse; import org.elasticsearch.client.ml.PutDatafeedRequest; @@ -181,7 +186,7 @@ public final class MachineLearningClient { } /** - * Gets one or more Machine Learning job configuration info, asynchronously. + * Gets usage statistics for one or more Machine Learning jobs, asynchronously. *
<p>
* For additional info * see Get job stats docs @@ -207,14 +212,15 @@ * * @param request The request to delete the job * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement + * @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for + * completion * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public AcknowledgedResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { + public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, MLRequestConverters::deleteJob, options, - AcknowledgedResponse::fromXContent, + DeleteJobResponse::fromXContent, Collections.emptySet()); } @@ -228,11 +234,11 @@ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ - public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) { + public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<DeleteJobResponse> listener) { restHighLevelClient.performRequestAsyncAndParseEntity(request, MLRequestConverters::deleteJob, options, - AcknowledgedResponse::fromXContent, + DeleteJobResponse::fromXContent, listener, Collections.emptySet()); } @@ -649,6 +655,90 @@ public final class MachineLearningClient { Collections.emptySet()); } + /** + * Gets statistics for one or more Machine Learning datafeeds + *
<p>
+ * For additional info + * see Get datafeed stats docs + * + * @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return {@link GetDatafeedStatsResponse} response object containing + * the {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} objects and the number of datafeeds found + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public GetDatafeedStatsResponse getDatafeedStats(GetDatafeedStatsRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::getDatafeedStats, + options, + GetDatafeedStatsResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Previews the given Machine Learning Datafeed + *
<p>
+ * For additional info + * see + * ML Preview Datafeed documentation + * + * @param request The request to preview the datafeed + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return {@link PreviewDatafeedResponse} object containing a {@link org.elasticsearch.common.bytes.BytesReference} of the data in + * JSON format + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public PreviewDatafeedResponse previewDatafeed(PreviewDatafeedRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::previewDatafeed, + options, + PreviewDatafeedResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Gets statistics for one or more Machine Learning datafeeds, asynchronously. + *
<p>
+ * For additional info + * see Get datafeed stats docs + * + * @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified with {@link GetDatafeedStatsResponse} upon request completion + */ + public void getDatafeedStatsAsync(GetDatafeedStatsRequest request, + RequestOptions options, + ActionListener<GetDatafeedStatsResponse> listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::getDatafeedStats, + options, + GetDatafeedStatsResponse::fromXContent, + listener, + Collections.emptySet()); + } + + /** + * Previews the given Machine Learning Datafeed asynchronously and notifies the listener on completion + *
<p>
+ * For additional info + * see + * ML Preview Datafeed documentation + * + * @param request The request to preview the datafeed + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void previewDatafeedAsync(PreviewDatafeedRequest request, + RequestOptions options, + ActionListener<PreviewDatafeedResponse> listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::previewDatafeed, + options, + PreviewDatafeedResponse::fromXContent, + listener, + Collections.emptySet()); + } + /** * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} *
<p>
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java index c182cc27e84..cfa5e6e61fa 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java @@ -19,6 +19,8 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.watcher.ActivateWatchRequest; +import org.elasticsearch.client.watcher.ActivateWatchResponse; import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.client.watcher.AckWatchResponse; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; @@ -121,4 +123,31 @@ public final class WatcherClient { AckWatchResponse::fromXContent, listener, emptySet()); } + /** + * Activate a watch from the cluster + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public ActivateWatchResponse activateWatch(ActivateWatchRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::activateWatch, options, + ActivateWatchResponse::fromXContent, singleton(404)); + } + + /** + * Asynchronously activates a watch from the cluster + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void activateWatchAsync(ActivateWatchRequest request, RequestOptions options, ActionListener<ActivateWatchResponse> listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::activateWatch, options, + ActivateWatchResponse::fromXContent, listener, singleton(404)); + } + } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java index 7a8fa19633e..3a17056f9bf 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java @@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; +import org.elasticsearch.client.watcher.ActivateWatchRequest; import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; @@ -73,4 +74,16 @@ public class WatcherRequestConverters { Request request = new Request(HttpPut.METHOD_NAME, endpoint); return request; } + + static Request activateWatch(ActivateWatchRequest activateWatchRequest) { + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("watcher") + .addPathPartAsIs("watch") + .addPathPart(activateWatchRequest.getWatchId()) + .addPathPartAsIs("_activate") + .build(); + Request request =
new Request(HttpPut.METHOD_NAME, endpoint); + return request; + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java index a355f7ec659..44e3668059c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java @@ -29,7 +29,8 @@ import java.util.Objects; public class DeleteJobRequest extends ActionRequest { private String jobId; - private boolean force; + private Boolean force; + private Boolean waitForCompletion; public DeleteJobRequest(String jobId) { this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); @@ -47,7 +48,7 @@ public class DeleteJobRequest extends ActionRequest { this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); } - public boolean isForce() { + public Boolean getForce() { return force; } @@ -57,10 +58,24 @@ public class DeleteJobRequest extends ActionRequest { * * @param force When {@code true} forcefully delete an opened job. Defaults to {@code false} */ - public void setForce(boolean force) { + public void setForce(Boolean force) { this.force = force; } + public Boolean getWaitForCompletion() { + return waitForCompletion; + } + + /** + * Set whether this request should wait until the operation has completed before returning + * @param waitForCompletion When {@code true} the call will wait for the job deletion to complete. + * Otherwise, the deletion will be executed asynchronously and the response + * will contain the task id. + */ + public void setWaitForCompletion(Boolean waitForCompletion) { + this.waitForCompletion = waitForCompletion; + } + @Override public ActionRequestValidationException validate() { return null; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java new file mode 100644 index 00000000000..f1487c8c276 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.tasks.TaskId; + +import java.io.IOException; +import java.util.Objects; + +/** + * Response object that contains the acknowledgement or the task id + * depending on whether the delete job action was requested to wait for completion. + */ +public class DeleteJobResponse extends ActionResponse implements ToXContentObject { + + private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); + private static final ParseField TASK = new ParseField("task"); + + public static final ConstructingObjectParser<DeleteJobResponse, Void> PARSER = new ConstructingObjectParser<>("delete_job_response", + true, a -> new DeleteJobResponse((Boolean) a[0], (TaskId) a[1])); + + static { + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ACKNOWLEDGED); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), TaskId.parser(), TASK, ObjectParser.ValueType.STRING); + } + + public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + private final Boolean acknowledged; + private final TaskId task; + + DeleteJobResponse(@Nullable Boolean acknowledged, @Nullable TaskId task) { + assert acknowledged != null || task != null; + this.acknowledged = acknowledged; + this.task = task; + } + + /** + * Get the action acknowledgement + * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code false} or + * otherwise a {@code boolean} that indicates whether the job was deleted successfully. + */ + public Boolean getAcknowledged() { + return acknowledged; + } + + /** + * Get the task id + * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code true} or + * otherwise the id of the job deletion task.
+ */ + public TaskId getTask() { + return task; + } + + @Override + public int hashCode() { + return Objects.hash(acknowledged, task); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + DeleteJobResponse that = (DeleteJobResponse) other; + return Objects.equals(acknowledged, that.acknowledged) && Objects.equals(task, that.task); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (acknowledged != null) { + builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); + } + if (task != null) { + builder.field(TASK.getPreferredName(), task.toString()); + } + builder.endObject(); + return builder; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java new file mode 100644 index 00000000000..b4949708c17 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java @@ -0,0 +1,147 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ml.datafeed.DatafeedConfig; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * Request object to get {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} by their respective datafeedIds + * + * {@code _all} explicitly gets all the datafeeds' statistics in the cluster + * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds' statistics in the cluster + */ +public class GetDatafeedStatsRequest extends ActionRequest implements ToXContentObject { + + public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser<GetDatafeedStatsRequest, Void> PARSER = new ConstructingObjectParser<>( + "get_datafeed_stats_request", a -> new GetDatafeedStatsRequest((List<String>) a[0])); + + static { + PARSER.declareField(ConstructingObjectParser.constructorArg(), + p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), + DatafeedConfig.ID, ObjectParser.ValueType.STRING_ARRAY); + PARSER.declareBoolean(GetDatafeedStatsRequest::setAllowNoDatafeeds, ALLOW_NO_DATAFEEDS); + } + + private static final String ALL_DATAFEEDS = "_all"; + + private final List<String> datafeedIds; + private Boolean allowNoDatafeeds; + + /** + * Explicitly gets all datafeeds statistics + * + * @return a {@link GetDatafeedStatsRequest} for all existing datafeeds + */ + public static GetDatafeedStatsRequest getAllDatafeedStatsRequest(){ + return new GetDatafeedStatsRequest(ALL_DATAFEEDS); + } + + GetDatafeedStatsRequest(List<String> datafeedIds) { + if (datafeedIds.stream().anyMatch(Objects::isNull)) { + throw new NullPointerException("datafeedIds must not contain null values"); + } + this.datafeedIds = new ArrayList<>(datafeedIds); + } + + /** + * Get the specified Datafeed's statistics via their unique datafeedIds + * + * @param datafeedIds must be non-null and each datafeedId must be non-null + */ + public GetDatafeedStatsRequest(String... datafeedIds) { + this(Arrays.asList(datafeedIds)); + } + + /** + * All the datafeedIds for which to get statistics + */ + public List<String> getDatafeedIds() { + return datafeedIds; + } + + public Boolean isAllowNoDatafeeds() { + return this.allowNoDatafeeds; + } + + /** + * Whether to ignore if a wildcard expression matches no datafeeds. + * + * This includes {@code _all} string or when no datafeeds have been specified + * + * @param allowNoDatafeeds When {@code true} ignore if wildcard or {@code _all} matches no datafeeds.
Defaults to {@code true} + */ + public void setAllowNoDatafeeds(boolean allowNoDatafeeds) { + this.allowNoDatafeeds = allowNoDatafeeds; + } + + @Override + public int hashCode() { + return Objects.hash(datafeedIds, allowNoDatafeeds); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + GetDatafeedStatsRequest that = (GetDatafeedStatsRequest) other; + return Objects.equals(datafeedIds, that.datafeedIds) && + Objects.equals(allowNoDatafeeds, that.allowNoDatafeeds); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds)); + if (allowNoDatafeeds != null) { + builder.field(ALLOW_NO_DATAFEEDS.getPreferredName(), allowNoDatafeeds); + } + builder.endObject(); + return builder; + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java new file mode 100644 index 00000000000..548c8fe2359 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.datafeed.DatafeedStats;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Contains a {@link List} of the found {@link DatafeedStats} objects and the total count found
+ */
+public class GetDatafeedStatsResponse extends AbstractResultResponse<DatafeedStats> {
+
+    public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
+
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<GetDatafeedStatsResponse, Void> PARSER =
+        new ConstructingObjectParser<>("get_datafeed_stats_response",
+            true,
+            a -> new GetDatafeedStatsResponse((List<DatafeedStats>) a[0], (long) a[1]));
+
+    static {
+        PARSER.declareObjectArray(constructorArg(), DatafeedStats.PARSER, RESULTS_FIELD);
+        PARSER.declareLong(constructorArg(), COUNT);
+    }
+
+    GetDatafeedStatsResponse(List<DatafeedStats> results, long count) {
+        super(RESULTS_FIELD, results, count);
+    }
+
+    /**
+     * The collection of {@link DatafeedStats} objects found in the query
+     */
+    public List<DatafeedStats> datafeedStats() {
+        return results;
+    }
+
+    public static GetDatafeedStatsResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(results, count);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        GetDatafeedStatsResponse other = (GetDatafeedStatsResponse) obj;
+        return Objects.equals(results, other.results) && count == other.count;
+    }
+
+    @Override
+    public final String toString() {
+        return Strings.toString(this);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java
new file mode 100644
index 00000000000..a21e96b4642
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Request to preview a Machine Learning datafeed
+ */
+public class PreviewDatafeedRequest extends ActionRequest implements ToXContentObject {
+
+    public static final ConstructingObjectParser<PreviewDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>(
+        "preview_datafeed_request", true, a -> new PreviewDatafeedRequest((String) a[0]));
+
+    static {
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID);
+    }
+
+    public static PreviewDatafeedRequest fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    private final String datafeedId;
+
+    /**
+     * Create a new request with the desired datafeedId
+     *
+     * @param datafeedId unique datafeedId, must not be null
+     */
+    public PreviewDatafeedRequest(String datafeedId) {
+        this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null");
+    }
+
+    public String getDatafeedId() {
+        return datafeedId;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        return null;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(datafeedId);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+
+        if (other == null || getClass() != other.getClass()) {
+            return false;
+        }
+
+        PreviewDatafeedRequest that = (PreviewDatafeedRequest) other;
+        return Objects.equals(datafeedId, that.datafeedId);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java
new file mode 100644
index 00000000000..ca96f153c60
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
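Likewise, a minimal preview sketch, again assuming a configured RestHighLevelClient named client and a hypothetical datafeed id; getDataList() is defined in the response class that follows.

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;

import java.io.IOException;
import java.util.Map;

public class PreviewDatafeedExample {
    // `client` is assumed to be an already-configured RestHighLevelClient
    static void preview(RestHighLevelClient client) throws IOException {
        PreviewDatafeedResponse response = client.machineLearning()
            .previewDatafeed(new PreviewDatafeedRequest("datafeed-1"), RequestOptions.DEFAULT);
        // The preview arrives as raw JSON; getDataList() parses it into one map per document
        for (Map<String, Object> doc : response.getDataList()) {
            System.out.println(doc);
        }
    }
}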
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+/**
+ * Response containing a datafeed preview in JSON format
+ */
+public class PreviewDatafeedResponse extends ActionResponse implements ToXContentObject {
+
+    private BytesReference preview;
+
+    public static PreviewDatafeedResponse fromXContent(XContentParser parser) throws IOException {
+        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
+            parser.nextToken();
+            builder.copyCurrentStructure(parser);
+            return new PreviewDatafeedResponse(BytesReference.bytes(builder));
+        }
+    }
+
+    public PreviewDatafeedResponse(BytesReference preview) {
+        this.preview = preview;
+    }
+
+    public BytesReference getPreview() {
+        return preview;
+    }
+
+    /**
+     * Parses the preview to a list of {@link Map} objects
+     * @return List of previewed data
+     * @throws IOException If there is a parsing issue with the {@link BytesReference}
+     * @throws java.lang.ClassCastException If casting the raw {@link Object} entries to a {@link Map} fails
+     */
+    @SuppressWarnings("unchecked")
+    public List<Map<String, Object>> getDataList() throws IOException {
+        try (StreamInput streamInput = preview.streamInput();
+             XContentParser parser = XContentType.JSON.xContent()
+                 .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, streamInput)) {
+            XContentParser.Token token = parser.nextToken();
+            if (token == XContentParser.Token.START_ARRAY) {
+                return parser.listOrderedMap().stream().map(obj -> (Map<String, Object>) obj).collect(Collectors.toList());
+            } else {
+                return Collections.singletonList(parser.mapOrdered());
+            }
+        }
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        try (InputStream stream = preview.streamInput()) {
+            builder.rawValue(stream, XContentType.JSON);
+        }
+        return builder;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(preview);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null) {
+            return false;
+        }
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+        PreviewDatafeedResponse other = (PreviewDatafeedResponse) obj;
+        return Objects.equals(preview, other.preview);
+    }
+
+    @Override
+    public final String toString() {
+        return Strings.toString(this);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java
new file mode 100644
index 00000000000..e83ae211fb5
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements.
+ * See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.datafeed;
+
+import org.elasticsearch.common.ParseField;
+
+import java.util.Locale;
+
+/**
+ * Datafeed State POJO
+ */
+public enum DatafeedState {
+
+    STARTED, STOPPED, STARTING, STOPPING;
+
+    public static final ParseField STATE = new ParseField("state");
+
+    public static DatafeedState fromString(String name) {
+        return valueOf(name.trim().toUpperCase(Locale.ROOT));
+    }
+
+    @Override
+    public String toString() {
+        return name().toLowerCase(Locale.ROOT);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java
new file mode 100644
index 00000000000..8a9f9ae9a79
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
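As a quick illustration of the enum just added: the REST layer reports states in lowercase, and fromString() is case-insensitive, so the two forms round-trip. A self-contained sketch:

import org.elasticsearch.client.ml.datafeed.DatafeedState;

public class DatafeedStateExample {
    public static void main(String[] args) {
        // fromString() trims and upper-cases, so the lowercase wire format maps to the constant
        DatafeedState state = DatafeedState.fromString("started");
        System.out.println(state == DatafeedState.STARTED); // true
        // toString() produces the wire format again
        System.out.println(state); // started
    }
}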
+ */
+package org.elasticsearch.client.ml.datafeed;
+
+import org.elasticsearch.client.ml.NodeAttributes;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Datafeed Statistics POJO
+ */
+public class DatafeedStats implements ToXContentObject {
+
+    private final String datafeedId;
+    private final DatafeedState datafeedState;
+    @Nullable
+    private final NodeAttributes node;
+    @Nullable
+    private final String assignmentExplanation;
+
+    public static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
+    public static final ParseField NODE = new ParseField("node");
+
+    public static final ConstructingObjectParser<DatafeedStats, Void> PARSER = new ConstructingObjectParser<>("datafeed_stats",
+        true,
+        a -> {
+            String datafeedId = (String) a[0];
+            DatafeedState datafeedState = DatafeedState.fromString((String) a[1]);
+            NodeAttributes nodeAttributes = (NodeAttributes) a[2];
+            String assignmentExplanation = (String) a[3];
+            return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentExplanation);
+        });
+
+    static {
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID);
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedState.STATE);
+        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE);
+        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
+    }
+
+    public DatafeedStats(String datafeedId, DatafeedState datafeedState, @Nullable NodeAttributes node,
+                         @Nullable String assignmentExplanation) {
+        this.datafeedId = Objects.requireNonNull(datafeedId);
+        this.datafeedState = Objects.requireNonNull(datafeedState);
+        this.node = node;
+        this.assignmentExplanation = assignmentExplanation;
+    }
+
+    public String getDatafeedId() {
+        return datafeedId;
+    }
+
+    public DatafeedState getDatafeedState() {
+        return datafeedState;
+    }
+
+    public NodeAttributes getNode() {
+        return node;
+    }
+
+    public String getAssignmentExplanation() {
+        return assignmentExplanation;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+        builder.startObject();
+        builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
+        builder.field(DatafeedState.STATE.getPreferredName(), datafeedState.toString());
+        if (node != null) {
+            builder.startObject("node");
+            builder.field("id", node.getId());
+            builder.field("name", node.getName());
+            builder.field("ephemeral_id", node.getEphemeralId());
+            builder.field("transport_address", node.getTransportAddress());
+
+            builder.startObject("attributes");
+            for (Map.Entry<String, String> entry : node.getAttributes().entrySet()) {
+                if (entry.getKey().startsWith("ml.")) {
+                    builder.field(entry.getKey(), entry.getValue());
+                }
+            }
+            builder.endObject();
+            builder.endObject();
+        }
+        if (assignmentExplanation != null) {
+            builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(datafeedId, datafeedState.toString(), node, assignmentExplanation);
+    }
+
+    @Override
+    public boolean
equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + DatafeedStats other = (DatafeedStats) obj; + return Objects.equals(datafeedId, other.datafeedId) && + Objects.equals(this.datafeedState, other.datafeedState) && + Objects.equals(this.node, other.node) && + Objects.equals(this.assignmentExplanation, other.assignmentExplanation); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java index e1af60269b5..44fc18032d2 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java @@ -215,7 +215,7 @@ public class Detector implements ToXContentObject { } /** - * Excludes frequently-occuring metrics from the analysis; + * Excludes frequently-occurring metrics from the analysis; * can apply to 'by' field, 'over' field, or both * * @return the value that the user set diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java index 7740d8cfc51..13b4dcb955a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java @@ -57,7 +57,6 @@ public class Job implements ToXContentObject { public static final ParseField DATA_DESCRIPTION = new ParseField("data_description"); public static final ParseField DESCRIPTION = new ParseField("description"); public static final ParseField FINISHED_TIME = new ParseField("finished_time"); - public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time"); public static final ParseField ESTABLISHED_MODEL_MEMORY = new ParseField("established_model_memory"); public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config"); public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days"); @@ -66,6 +65,7 @@ public class Job implements ToXContentObject { public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days"); public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name"); + public static final ParseField DELETING = new ParseField("deleting"); public static final ObjectParser PARSER = new ObjectParser<>("job_details", true, Builder::new); @@ -82,10 +82,6 @@ public class Job implements ToXContentObject { (p) -> TimeUtil.parseTimeField(p, FINISHED_TIME.getPreferredName()), FINISHED_TIME, ValueType.VALUE); - PARSER.declareField(Builder::setLastDataTime, - (p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()), - LAST_DATA_TIME, - ValueType.VALUE); PARSER.declareLong(Builder::setEstablishedModelMemory, ESTABLISHED_MODEL_MEMORY); PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG); PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS); @@ -99,6 +95,7 @@ public class Job implements ToXContentObject { PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT); PARSER.declareStringOrNull(Builder::setModelSnapshotId, 
MODEL_SNAPSHOT_ID); PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); + PARSER.declareBoolean(Builder::setDeleting, DELETING); } private final String jobId; @@ -108,7 +105,6 @@ public class Job implements ToXContentObject { private final String description; private final Date createTime; private final Date finishedTime; - private final Date lastDataTime; private final Long establishedModelMemory; private final AnalysisConfig analysisConfig; private final AnalysisLimits analysisLimits; @@ -121,13 +117,14 @@ public class Job implements ToXContentObject { private final Map customSettings; private final String modelSnapshotId; private final String resultsIndexName; + private final Boolean deleting; - private Job(String jobId, String jobType, List groups, String description, Date createTime, - Date finishedTime, Date lastDataTime, Long establishedModelMemory, + private Job(String jobId, String jobType, List groups, String description, + Date createTime, Date finishedTime, Long establishedModelMemory, AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription, ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval, Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map customSettings, - String modelSnapshotId, String resultsIndexName) { + String modelSnapshotId, String resultsIndexName, Boolean deleting) { this.jobId = jobId; this.jobType = jobType; @@ -135,7 +132,6 @@ public class Job implements ToXContentObject { this.description = description; this.createTime = createTime; this.finishedTime = finishedTime; - this.lastDataTime = lastDataTime; this.establishedModelMemory = establishedModelMemory; this.analysisConfig = analysisConfig; this.analysisLimits = analysisLimits; @@ -148,6 +144,7 @@ public class Job implements ToXContentObject { this.customSettings = customSettings == null ? null : Collections.unmodifiableMap(customSettings); this.modelSnapshotId = modelSnapshotId; this.resultsIndexName = resultsIndexName; + this.deleting = deleting; } /** @@ -205,16 +202,6 @@ public class Job implements ToXContentObject { return finishedTime; } - /** - * The last time data was uploaded to the job or null if no - * data has been seen. - * - * @return The date at which the last data was processed - */ - public Date getLastDataTime() { - return lastDataTime; - } - /** * The established model memory of the job, or null if model * memory has not reached equilibrium yet. 
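Because Job.PARSER is constructed lenient (the true flag above), removing the last_data_time declaration means documents that still carry the field parse cleanly and the value is simply dropped. A minimal sketch of that behaviour, using a hypothetical JSON payload not taken from this PR:

import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;

public class JobParsingSketch {
    public static void main(String[] args) throws IOException {
        // "deleting" is now declared on the parser; "last_data_time" is unknown and ignored
        String json = "{\"job_id\":\"job-1\",\"job_type\":\"anomaly_detector\","
            + "\"deleting\":true,\"last_data_time\":1538593716000}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            Job job = Job.PARSER.apply(parser, null).build();
            System.out.println(job.getDeleting()); // true
        }
    }
}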
@@ -292,6 +279,10 @@ public class Job implements ToXContentObject { return modelSnapshotId; } + public Boolean getDeleting() { + return deleting; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -313,10 +304,6 @@ public class Job implements ToXContentObject { builder.timeField(FINISHED_TIME.getPreferredName(), FINISHED_TIME.getPreferredName() + humanReadableSuffix, finishedTime.getTime()); } - if (lastDataTime != null) { - builder.timeField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + humanReadableSuffix, - lastDataTime.getTime()); - } if (establishedModelMemory != null) { builder.field(ESTABLISHED_MODEL_MEMORY.getPreferredName(), establishedModelMemory); } @@ -351,6 +338,9 @@ public class Job implements ToXContentObject { if (resultsIndexName != null) { builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); } + if (deleting != null) { + builder.field(DELETING.getPreferredName(), deleting); + } builder.endObject(); return builder; } @@ -372,7 +362,6 @@ public class Job implements ToXContentObject { && Objects.equals(this.description, that.description) && Objects.equals(this.createTime, that.createTime) && Objects.equals(this.finishedTime, that.finishedTime) - && Objects.equals(this.lastDataTime, that.lastDataTime) && Objects.equals(this.establishedModelMemory, that.establishedModelMemory) && Objects.equals(this.analysisConfig, that.analysisConfig) && Objects.equals(this.analysisLimits, that.analysisLimits) @@ -384,15 +373,16 @@ public class Job implements ToXContentObject { && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) && Objects.equals(this.customSettings, that.customSettings) && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) - && Objects.equals(this.resultsIndexName, that.resultsIndexName); + && Objects.equals(this.resultsIndexName, that.resultsIndexName) + && Objects.equals(this.deleting, that.deleting); } @Override public int hashCode() { - return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory, + return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, - modelSnapshotId, resultsIndexName); + modelSnapshotId, resultsIndexName, deleting); } @Override @@ -415,7 +405,6 @@ public class Job implements ToXContentObject { private DataDescription dataDescription; private Date createTime; private Date finishedTime; - private Date lastDataTime; private Long establishedModelMemory; private ModelPlotConfig modelPlotConfig; private Long renormalizationWindowDays; @@ -425,6 +414,7 @@ public class Job implements ToXContentObject { private Map customSettings; private String modelSnapshotId; private String resultsIndexName; + private Boolean deleting; private Builder() { } @@ -443,7 +433,6 @@ public class Job implements ToXContentObject { this.dataDescription = job.getDataDescription(); this.createTime = job.getCreateTime(); this.finishedTime = job.getFinishedTime(); - this.lastDataTime = job.getLastDataTime(); this.establishedModelMemory = job.getEstablishedModelMemory(); this.modelPlotConfig = job.getModelPlotConfig(); this.renormalizationWindowDays = job.getRenormalizationWindowDays(); @@ -453,6 +442,7 @@ public class Job implements 
ToXContentObject { this.customSettings = job.getCustomSettings(); this.modelSnapshotId = job.getModelSnapshotId(); this.resultsIndexName = job.getResultsIndexNameNoPrefix(); + this.deleting = job.getDeleting(); } public Builder setId(String id) { @@ -504,16 +494,6 @@ public class Job implements ToXContentObject { return this; } - /** - * Set the wall clock time of the last data upload - * - * @param lastDataTime Wall clock time - */ - public Builder setLastDataTime(Date lastDataTime) { - this.lastDataTime = lastDataTime; - return this; - } - public Builder setEstablishedModelMemory(Long establishedModelMemory) { this.establishedModelMemory = establishedModelMemory; return this; @@ -559,6 +539,11 @@ public class Job implements ToXContentObject { return this; } + Builder setDeleting(Boolean deleting) { + this.deleting = deleting; + return this; + } + /** * Builds a job. * @@ -568,10 +553,10 @@ public class Job implements ToXContentObject { Objects.requireNonNull(id, "[" + ID.getPreferredName() + "] must not be null"); Objects.requireNonNull(jobType, "[" + JOB_TYPE.getPreferredName() + "] must not be null"); return new Job( - id, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory, + id, jobType, groups, description, createTime, finishedTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, - modelSnapshotId, resultsIndexName); + modelSnapshotId, resultsIndexName, deleting); } } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java index c10610a872f..3f743b36422 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java @@ -48,6 +48,7 @@ public class AnomalyRecord implements ToXContentObject { * Result fields (all detector types) */ public static final ParseField PROBABILITY = new ParseField("probability"); + public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact"); public static final ParseField DETECTOR_INDEX = new ParseField("detector_index"); public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name"); public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value"); @@ -94,6 +95,7 @@ public class AnomalyRecord implements ToXContentObject { PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE); PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY); + PARSER.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT); PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE); PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE); PARSER.declareInt(AnomalyRecord::setDetectorIndex, DETECTOR_INDEX); @@ -117,6 +119,7 @@ public class AnomalyRecord implements ToXContentObject { private final String jobId; private int detectorIndex; private double probability; + private Double multiBucketImpact; private String byFieldName; private String byFieldValue; private String correlatedByFieldValue; @@ -155,6 +158,9 @@ public class AnomalyRecord implements ToXContentObject { 
builder.field(Job.ID.getPreferredName(), jobId); builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); builder.field(PROBABILITY.getPreferredName(), probability); + if (multiBucketImpact != null) { + builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact); + } builder.field(RECORD_SCORE.getPreferredName(), recordScore); builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore); builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); @@ -254,6 +260,14 @@ public class AnomalyRecord implements ToXContentObject { probability = value; } + public double getMultiBucketImpact() { + return multiBucketImpact; + } + + void setMultiBucketImpact(double value) { + multiBucketImpact = value; + } + public String getByFieldName() { return byFieldName; } @@ -376,7 +390,7 @@ public class AnomalyRecord implements ToXContentObject { @Override public int hashCode() { - return Objects.hash(jobId, detectorIndex, bucketSpan, probability, recordScore, + return Objects.hash(jobId, detectorIndex, bucketSpan, probability, multiBucketImpact, recordScore, initialRecordScore, typical, actual,function, functionDescription, fieldName, byFieldName, byFieldValue, correlatedByFieldValue, partitionFieldName, partitionFieldValue, overFieldName, overFieldValue, timestamp, isInterim, @@ -399,6 +413,7 @@ public class AnomalyRecord implements ToXContentObject { && this.detectorIndex == that.detectorIndex && this.bucketSpan == that.bucketSpan && this.probability == that.probability + && Objects.equals(this.multiBucketImpact, that.multiBucketImpact) && this.recordScore == that.recordScore && this.initialRecordScore == that.initialRecordScore && Objects.deepEquals(this.typical, that.typical) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ActivateWatchRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ActivateWatchRequest.java new file mode 100644 index 00000000000..7f2849ff39c --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ActivateWatchRequest.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.watcher; + +import org.elasticsearch.client.Validatable; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; + +import java.util.Objects; + +/** + * A request to explicitly activate a watch. 
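+ * Activation may be requested repeatedly: re-activating an already active watch
+ * succeeds and refreshes the timestamp on the returned watch status.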
+ */ +public final class ActivateWatchRequest implements Validatable { + + private final String watchId; + + public ActivateWatchRequest(String watchId) { + this.watchId = Objects.requireNonNull(watchId, "Watch identifier is required"); + if (PutWatchRequest.isValidId(this.watchId) == false) { + throw new IllegalArgumentException("Watch identifier contains whitespace"); + } + } + + /** + * @return The ID of the watch to be activated. + */ + public String getWatchId() { + return watchId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ActivateWatchRequest that = (ActivateWatchRequest) o; + return Objects.equals(watchId, that.watchId); + } + + @Override + public int hashCode() { + int result = Objects.hash(watchId); + return result; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ActivateWatchResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ActivateWatchResponse.java new file mode 100644 index 00000000000..b1e63e767f3 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ActivateWatchResponse.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.watcher; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +/** + * Response from an 'activate watch' request. 
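Putting the two watcher classes together, a minimal activation sketch, assuming a configured RestHighLevelClient named client and a hypothetical watch id; the same call appears in WatcherIT below.

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchResponse;

import java.io.IOException;

public class ActivateWatchExample {
    // `client` is assumed to be an already-configured RestHighLevelClient
    static void activate(RestHighLevelClient client) throws IOException {
        ActivateWatchResponse response = client.watcher()
            .activateWatch(new ActivateWatchRequest("my-watch"), RequestOptions.DEFAULT);
        // The returned status carries the new activation state
        System.out.println(response.getStatus().state().isActive()); // true
    }
}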
+ */ +public final class ActivateWatchResponse { + + private static final ParseField STATUS_FIELD = new ParseField("status"); + private static ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("activate_watch_response", true, + a -> new ActivateWatchResponse((WatchStatus) a[0])); + + static { + PARSER.declareObject(ConstructingObjectParser.constructorArg(), + (parser, context) -> WatchStatus.parse(parser), + STATUS_FIELD); + } + + private final WatchStatus status; + + public ActivateWatchResponse(WatchStatus status) { + this.status = status; + } + + public WatchStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ActivateWatchResponse that = (ActivateWatchResponse) o; + return Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(status); + } + + public static ActivateWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java index 70685296192..84bf43ab019 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java @@ -78,6 +78,16 @@ public class IngestClientIT extends ESRestHighLevelClientTestCase { assertEquals(expectedConfig.getConfigAsMap(), response.pipelines().get(0).getConfigAsMap()); } + public void testGetNonexistentPipeline() throws IOException { + String id = "nonexistent_pipeline_id"; + + GetPipelineRequest request = new GetPipelineRequest(id); + + GetPipelineResponse response = + execute(request, highLevelClient().ingest()::getPipeline, highLevelClient().ingest()::getPipelineAsync); + assertFalse(response.isFound()); + } + public void testDeletePipeline() throws IOException { String id = "some_pipeline_id"; { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index ee53da18cd2..8c5f49c943f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetCalendarsRequest; import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetDatafeedRequest; +import org.elasticsearch.client.ml.GetDatafeedStatsRequest; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetJobRequest; import org.elasticsearch.client.ml.GetJobStatsRequest; @@ -41,6 +42,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest; import org.elasticsearch.client.ml.GetRecordsRequest; import org.elasticsearch.client.ml.OpenJobRequest; import org.elasticsearch.client.ml.PostDataRequest; +import org.elasticsearch.client.ml.PreviewDatafeedRequest; import org.elasticsearch.client.ml.PutCalendarRequest; import org.elasticsearch.client.ml.PutDatafeedRequest; import org.elasticsearch.client.ml.PutJobRequest; @@ -162,11 +164,18 @@ public class MLRequestConvertersTests extends ESTestCase { Request request 
= MLRequestConverters.deleteJob(deleteJobRequest); assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint()); - assertEquals(Boolean.toString(false), request.getParameters().get("force")); + assertNull(request.getParameters().get("force")); + assertNull(request.getParameters().get("wait_for_completion")); + deleteJobRequest = new DeleteJobRequest(jobId); deleteJobRequest.setForce(true); request = MLRequestConverters.deleteJob(deleteJobRequest); assertEquals(Boolean.toString(true), request.getParameters().get("force")); + + deleteJobRequest = new DeleteJobRequest(jobId); + deleteJobRequest.setWaitForCompletion(false); + request = MLRequestConverters.deleteJob(deleteJobRequest); + assertEquals(Boolean.toString(false), request.getParameters().get("wait_for_completion")); } public void testFlushJob() throws Exception { @@ -293,6 +302,30 @@ public class MLRequestConvertersTests extends ESTestCase { } } + public void testGetDatafeedStats() { + GetDatafeedStatsRequest getDatafeedStatsRequestRequest = new GetDatafeedStatsRequest(); + + Request request = MLRequestConverters.getDatafeedStats(getDatafeedStatsRequestRequest); + + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/datafeeds/_stats", request.getEndpoint()); + assertFalse(request.getParameters().containsKey("allow_no_datafeeds")); + + getDatafeedStatsRequestRequest = new GetDatafeedStatsRequest("datafeed1", "datafeeds*"); + getDatafeedStatsRequestRequest.setAllowNoDatafeeds(true); + request = MLRequestConverters.getDatafeedStats(getDatafeedStatsRequestRequest); + + assertEquals("/_xpack/ml/datafeeds/datafeed1,datafeeds*/_stats", request.getEndpoint()); + assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_datafeeds")); + } + + public void testPreviewDatafeed() { + PreviewDatafeedRequest datafeedRequest = new PreviewDatafeedRequest("datafeed_1"); + Request request = MLRequestConverters.previewDatafeed(datafeedRequest); + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/datafeeds/" + datafeedRequest.getDatafeedId() + "/_preview", request.getEndpoint()); + } + public void testDeleteForecast() { String jobId = randomAlphaOfLength(10); DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index a8050397ad1..cac9f533501 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.client.ml.DeleteCalendarRequest; import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; +import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; @@ -41,6 +42,8 @@ import org.elasticsearch.client.ml.GetCalendarsRequest; import org.elasticsearch.client.ml.GetCalendarsResponse; import org.elasticsearch.client.ml.GetDatafeedRequest; import org.elasticsearch.client.ml.GetDatafeedResponse; +import org.elasticsearch.client.ml.GetDatafeedStatsRequest; +import 
org.elasticsearch.client.ml.GetDatafeedStatsResponse; import org.elasticsearch.client.ml.GetJobRequest; import org.elasticsearch.client.ml.GetJobResponse; import org.elasticsearch.client.ml.GetJobStatsRequest; @@ -49,6 +52,8 @@ import org.elasticsearch.client.ml.OpenJobRequest; import org.elasticsearch.client.ml.OpenJobResponse; import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PostDataResponse; +import org.elasticsearch.client.ml.PreviewDatafeedRequest; +import org.elasticsearch.client.ml.PreviewDatafeedResponse; import org.elasticsearch.client.ml.PutCalendarRequest; import org.elasticsearch.client.ml.PutCalendarResponse; import org.elasticsearch.client.ml.PutDatafeedRequest; @@ -63,6 +68,8 @@ import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.client.ml.calendars.CalendarTests; import org.elasticsearch.client.ml.datafeed.DatafeedConfig; +import org.elasticsearch.client.ml.datafeed.DatafeedState; +import org.elasticsearch.client.ml.datafeed.DatafeedStats; import org.elasticsearch.client.ml.job.config.AnalysisConfig; import org.elasticsearch.client.ml.job.config.DataDescription; import org.elasticsearch.client.ml.job.config.Detector; @@ -76,8 +83,11 @@ import org.elasticsearch.rest.RestStatus; import org.junit.After; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -142,17 +152,33 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2)); } - public void testDeleteJob() throws Exception { + public void testDeleteJob_GivenWaitForCompletionIsTrue() throws Exception { String jobId = randomValidJobId(); Job job = buildJob(jobId); MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); - AcknowledgedResponse response = execute(new DeleteJobRequest(jobId), + DeleteJobResponse response = execute(new DeleteJobRequest(jobId), machineLearningClient::deleteJob, machineLearningClient::deleteJobAsync); - assertTrue(response.isAcknowledged()); + assertTrue(response.getAcknowledged()); + assertNull(response.getTask()); + } + + public void testDeleteJob_GivenWaitForCompletionIsFalse() throws Exception { + String jobId = randomValidJobId(); + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + DeleteJobRequest deleteJobRequest = new DeleteJobRequest(jobId); + deleteJobRequest.setWaitForCompletion(false); + + DeleteJobResponse response = execute(deleteJobRequest, machineLearningClient::deleteJob, machineLearningClient::deleteJobAsync); + + assertNull(response.getAcknowledged()); + assertNotNull(response.getTask()); } public void testOpenJob() throws Exception { @@ -564,6 +590,126 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { } } + public void testGetDatafeedStats() throws Exception { + String jobId1 = "ml-get-datafeed-stats-test-id-1"; + String jobId2 = "ml-get-datafeed-stats-test-id-2"; + String indexName = "datafeed_stats_data_1"; + + // Set up the index + CreateIndexRequest 
createIndexRequest = new CreateIndexRequest(indexName); + createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long"); + highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); + + // create the job and the datafeed + Job job1 = buildJob(jobId1); + putJob(job1); + openJob(job1); + + Job job2 = buildJob(jobId2); + putJob(job2); + + String datafeedId1 = createAndPutDatafeed(jobId1, indexName); + String datafeedId2 = createAndPutDatafeed(jobId2, indexName); + + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + + machineLearningClient.startDatafeed(new StartDatafeedRequest(datafeedId1), RequestOptions.DEFAULT); + + GetDatafeedStatsRequest request = new GetDatafeedStatsRequest(datafeedId1); + + // Test getting specific + GetDatafeedStatsResponse response = + execute(request, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync); + + assertEquals(1, response.count()); + assertThat(response.datafeedStats(), hasSize(1)); + assertThat(response.datafeedStats().get(0).getDatafeedId(), equalTo(datafeedId1)); + assertThat(response.datafeedStats().get(0).getDatafeedState().toString(), equalTo(DatafeedState.STARTED.toString())); + + // Test getting all explicitly + request = GetDatafeedStatsRequest.getAllDatafeedStatsRequest(); + response = execute(request, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync); + + assertTrue(response.count() >= 2L); + assertTrue(response.datafeedStats().size() >= 2L); + assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()), + hasItems(datafeedId1, datafeedId2)); + + // Test getting all implicitly + response = + execute(new GetDatafeedStatsRequest(), machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync); + + assertTrue(response.count() >= 2L); + assertTrue(response.datafeedStats().size() >= 2L); + assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()), + hasItems(datafeedId1, datafeedId2)); + + // Test getting all with wildcard + request = new GetDatafeedStatsRequest("ml-get-datafeed-stats-test-id-*"); + response = execute(request, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync); + assertEquals(2L, response.count()); + assertThat(response.datafeedStats(), hasSize(2)); + assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()), + hasItems(datafeedId1, datafeedId2)); + + // Test when allow_no_jobs is false + final GetDatafeedStatsRequest erroredRequest = new GetDatafeedStatsRequest("datafeeds-that-do-not-exist*"); + erroredRequest.setAllowNoDatafeeds(false); + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, + () -> execute(erroredRequest, machineLearningClient::getDatafeedStats, machineLearningClient::getDatafeedStatsAsync)); + assertThat(exception.status().getStatus(), equalTo(404)); + } + + public void testPreviewDatafeed() throws Exception { + String jobId = "test-preview-datafeed"; + String indexName = "preview_data_1"; + + // Set up the index and docs + CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); + createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long"); + highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); + BulkRequest bulk = new BulkRequest(); + 
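+        // IMMEDIATE forces a refresh once the bulk completes, so the indexed docs
+        // are already searchable when the datafeed preview below runs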
bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + long now = (System.currentTimeMillis()/1000)*1000; + long thePast = now - 60000; + int i = 0; + List totalTotals = new ArrayList<>(60); + while(thePast < now) { + Integer total = randomInt(1000); + IndexRequest doc = new IndexRequest(); + doc.index(indexName); + doc.type("doc"); + doc.id("id" + i); + doc.source("{\"total\":" + total + ",\"timestamp\":"+ thePast +"}", XContentType.JSON); + bulk.add(doc); + thePast += 1000; + i++; + totalTotals.add(total); + } + highLevelClient().bulk(bulk, RequestOptions.DEFAULT); + + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + // create the job and the datafeed + Job job = buildJob(jobId); + putJob(job); + openJob(job); + + String datafeedId = jobId + "-feed"; + DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId) + .setIndices(indexName) + .setQueryDelay(TimeValue.timeValueSeconds(1)) + .setTypes(Collections.singletonList("doc")) + .setFrequency(TimeValue.timeValueSeconds(1)).build(); + machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); + + PreviewDatafeedResponse response = execute(new PreviewDatafeedRequest(datafeedId), + machineLearningClient::previewDatafeed, + machineLearningClient::previewDatafeedAsync); + + Integer[] totals = response.getDataList().stream().map(map -> (Integer)map.get("total")).toArray(Integer[]::new); + assertThat(totalTotals, containsInAnyOrder(totals)); + } + public void testDeleteForecast() throws Exception { String jobId = "test-delete-forecast"; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java index 4964fc4be50..4ea462efb02 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java @@ -19,6 +19,8 @@ package org.elasticsearch.client; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.client.watcher.ActivateWatchRequest; +import org.elasticsearch.client.watcher.ActivateWatchResponse; import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.client.watcher.AckWatchResponse; import org.elasticsearch.client.watcher.ActionStatus; @@ -33,6 +35,7 @@ import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.rest.RestStatus; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; public class WatcherIT extends ESRestHighLevelClientTestCase { @@ -108,4 +111,26 @@ public class WatcherIT extends ESRestHighLevelClientTestCase { new AckWatchRequest("nonexistent"), RequestOptions.DEFAULT)); assertEquals(RestStatus.NOT_FOUND, exception.status()); } + + public void testActivateWatchThatExists() throws Exception { + String watchId = randomAlphaOfLength(10); + createWatch(watchId); + ActivateWatchResponse activateWatchResponse1 = highLevelClient().watcher().activateWatch(new ActivateWatchRequest(watchId), + RequestOptions.DEFAULT); + assertThat(activateWatchResponse1.getStatus().state().isActive(), is(true)); + + ActivateWatchResponse activateWatchResponse2 = highLevelClient().watcher().activateWatch(new ActivateWatchRequest(watchId), + RequestOptions.DEFAULT); + assertThat(activateWatchResponse2.getStatus().state().isActive(), is(true)); + assertThat(activateWatchResponse1.getStatus().state().getTimestamp(), + 
lessThan(activateWatchResponse2.getStatus().state().getTimestamp())); + } + + public void testActivateWatchThatDoesNotExist() throws Exception { + String watchId = randomAlphaOfLength(10); + // exception when activating a not existing watcher + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> + highLevelClient().watcher().activateWatch(new ActivateWatchRequest(watchId), RequestOptions.DEFAULT)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java index d6227e93941..72065150989 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.watcher.ActivateWatchRequest; import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentType; @@ -97,4 +98,14 @@ public class WatcherRequestConvertersTests extends ESTestCase { assertEquals(expectedEndpoint.toString(), request.getEndpoint()); assertThat(request.getEntity(), nullValue()); } + + public void testActivateWatchRequestConversion() { + String watchId = randomAlphaOfLength(10); + ActivateWatchRequest activateWatchRequest = new ActivateWatchRequest(watchId); + + Request request = WatcherRequestConverters.activateWatch(activateWatchRequest); + assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/watcher/watch/" + watchId + "/_activate", request.getEndpoint()); + assertThat(request.getEntity(), nullValue()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 6584381223c..79d2ccacf61 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -176,7 +176,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // <3> } if (shardInfo.getFailed() > 0) { - for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) { + for (ReplicationResponse.ShardInfo.Failure failure : + shardInfo.getFailures()) { String reason = failure.reason(); // <4> } } @@ -239,8 +240,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } { IndexRequest request = new IndexRequest("posts", "doc", "async").source("field", "value"); + ActionListener listener; // tag::index-execute-listener - ActionListener listener = new ActionListener() { + listener = new ActionListener() { @Override public void onResponse(IndexResponse indexResponse) { // <1> @@ -305,8 +307,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { request = new UpdateRequest("posts", "doc", "1").fetchSource(true); //tag::update-request-with-stored-script - Script stored = - new Script(ScriptType.STORED, null, "increment-field", parameters); // <1> + Script stored = new Script( + 
ScriptType.STORED, null, "increment-field", parameters); // <1> request.script(stored); // <2> //end::update-request-with-stored-script updateResponse = client.update(request, RequestOptions.DEFAULT); @@ -359,7 +361,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { //end::update-request-with-doc-as-string request.fetchSource(true); // tag::update-execute - UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); + UpdateResponse updateResponse = client.update( + request, RequestOptions.DEFAULT); // end::update-execute assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); @@ -397,7 +400,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // <1> } if (shardInfo.getFailed() > 0) { - for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) { + for (ReplicationResponse.ShardInfo.Failure failure : + shardInfo.getFailures()) { String reason = failure.reason(); // <2> } } @@ -408,7 +412,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { UpdateRequest request = new UpdateRequest("posts", "type", "does_not_exist") .doc("field", "value"); try { - UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); + UpdateResponse updateResponse = client.update( + request, RequestOptions.DEFAULT); } catch (ElasticsearchException e) { if (e.status() == RestStatus.NOT_FOUND) { // <1> @@ -422,7 +427,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { .doc("field", "value") .version(1); try { - UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); + UpdateResponse updateResponse = client.update( + request, RequestOptions.DEFAULT); } catch(ElasticsearchException e) { if (e.status() == RestStatus.CONFLICT) { // <1> @@ -445,7 +451,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { //tag::update-request-source-include String[] includes = new String[]{"updated", "r*"}; String[] excludes = Strings.EMPTY_ARRAY; - request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1> + request.fetchSource( + new FetchSourceContext(true, includes, excludes)); // <1> //end::update-request-source-include UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); @@ -459,7 +466,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { //tag::update-request-source-exclude String[] includes = Strings.EMPTY_ARRAY; String[] excludes = new String[]{"updated"}; - request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1> + request.fetchSource( + new FetchSourceContext(true, includes, excludes)); // <1> //end::update-request-source-exclude UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); @@ -508,8 +516,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { { UpdateRequest request = new UpdateRequest("posts", "doc", "async").doc("reason", "async update").docAsUpsert(true); + ActionListener listener; // tag::update-execute-listener - ActionListener listener = new ActionListener() { + listener = new ActionListener() { @Override public void onResponse(UpdateResponse updateResponse) { // <1> @@ -548,12 +557,13 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // tag::delete-request DeleteRequest request = 
@@ -548,12 +557,13 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
             // tag::delete-request
             DeleteRequest request = new DeleteRequest(
                     "posts",    // <1>
-                    "doc",      // <2>
-                    "1");       // <3>
+                    "doc",              // <2>
+                    "1");               // <3>
             // end::delete-request

             // tag::delete-execute
-            DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
+            DeleteResponse deleteResponse = client.delete(
+                    request, RequestOptions.DEFAULT);
             // end::delete-execute
             assertSame(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
@@ -567,7 +577,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
                 // <1>
             }
             if (shardInfo.getFailed() > 0) {
-                for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
+                for (ReplicationResponse.ShardInfo.Failure failure :
+                        shardInfo.getFailures()) {
                     String reason = failure.reason(); // <2>
                 }
             }
@@ -598,7 +609,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         {
             // tag::delete-notfound
             DeleteRequest request = new DeleteRequest("posts", "doc", "does_not_exist");
-            DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
+            DeleteResponse deleteResponse = client.delete(
+                    request, RequestOptions.DEFAULT);
             if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) {
                 // <1>
             }
@@ -612,8 +624,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {

             // tag::delete-conflict
             try {
-                DeleteRequest request = new DeleteRequest("posts", "doc", "1").version(2);
-                DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
+                DeleteResponse deleteResponse = client.delete(
+                    new DeleteRequest("posts", "doc", "1").version(2),
+                    RequestOptions.DEFAULT);
             } catch (ElasticsearchException exception) {
                 if (exception.status() == RestStatus.CONFLICT) {
                     // <1>
@@ -628,8 +641,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
             DeleteRequest request = new DeleteRequest("posts", "doc", "async");

+            ActionListener<DeleteResponse> listener;
             // tag::delete-execute-listener
-            ActionListener<DeleteResponse> listener = new ActionListener<DeleteResponse>() {
+            listener = new ActionListener<DeleteResponse>() {
                 @Override
                 public void onResponse(DeleteResponse deleteResponse) {
                     // <1>
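The listener-based hunks above (and all the machine-learning hunks below) follow one shape: declare the `ActionListener` outside the tagged doc snippet, wrap it in a `LatchedActionListener`, fire the async variant of the call, and block on the latch so the test does not finish before the callback runs. A minimal sketch of that pattern using the delete API from the hunk above; the index, type, and id values are placeholders, and `LatchedActionListener` is the test-framework helper these tests already import:

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;

public class AsyncDeleteSketch {
    static void deleteAsync(RestHighLevelClient client) throws InterruptedException {
        DeleteRequest request = new DeleteRequest("posts", "doc", "1"); // placeholder coordinates

        ActionListener<DeleteResponse> listener = new ActionListener<DeleteResponse>() {
            @Override
            public void onResponse(DeleteResponse deleteResponse) {
                // react to the completed delete
            }

            @Override
            public void onFailure(Exception e) {
                // react to the failure
            }
        };

        // Wrap the listener so the caller can wait for the async call to complete.
        CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        client.deleteAsync(request, RequestOptions.DEFAULT, listener);
        latch.await(30L, TimeUnit.SECONDS);
    }
}
```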
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
index a9fbb56f68f..e8383b9ba74 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
@@ -39,6 +39,7 @@ import org.elasticsearch.client.ml.DeleteCalendarRequest;
 import org.elasticsearch.client.ml.DeleteDatafeedRequest;
 import org.elasticsearch.client.ml.DeleteForecastRequest;
 import org.elasticsearch.client.ml.DeleteJobRequest;
+import org.elasticsearch.client.ml.DeleteJobResponse;
 import org.elasticsearch.client.ml.FlushJobRequest;
 import org.elasticsearch.client.ml.FlushJobResponse;
 import org.elasticsearch.client.ml.ForecastJobRequest;
@@ -51,6 +52,8 @@ import org.elasticsearch.client.ml.GetCategoriesRequest;
 import org.elasticsearch.client.ml.GetCategoriesResponse;
 import org.elasticsearch.client.ml.GetDatafeedRequest;
 import org.elasticsearch.client.ml.GetDatafeedResponse;
+import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
+import org.elasticsearch.client.ml.GetDatafeedStatsResponse;
 import org.elasticsearch.client.ml.GetInfluencersRequest;
 import org.elasticsearch.client.ml.GetInfluencersResponse;
 import org.elasticsearch.client.ml.GetJobRequest;
@@ -65,6 +68,8 @@ import org.elasticsearch.client.ml.OpenJobRequest;
 import org.elasticsearch.client.ml.OpenJobResponse;
 import org.elasticsearch.client.ml.PostDataRequest;
 import org.elasticsearch.client.ml.PostDataResponse;
+import org.elasticsearch.client.ml.PreviewDatafeedRequest;
+import org.elasticsearch.client.ml.PreviewDatafeedResponse;
 import org.elasticsearch.client.ml.PutCalendarRequest;
 import org.elasticsearch.client.ml.PutCalendarResponse;
 import org.elasticsearch.client.ml.PutDatafeedRequest;
@@ -79,6 +84,7 @@ import org.elasticsearch.client.ml.UpdateJobRequest;
 import org.elasticsearch.client.ml.calendars.Calendar;
 import org.elasticsearch.client.ml.datafeed.ChunkingConfig;
 import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
+import org.elasticsearch.client.ml.datafeed.DatafeedStats;
 import org.elasticsearch.client.ml.job.config.AnalysisConfig;
 import org.elasticsearch.client.ml.job.config.AnalysisLimits;
 import org.elasticsearch.client.ml.job.config.DataDescription;
@@ -97,11 +103,13 @@ import org.elasticsearch.client.ml.job.results.Influencer;
 import org.elasticsearch.client.ml.job.results.OverallBucket;
 import org.elasticsearch.client.ml.job.stats.JobStats;
 import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.tasks.TaskId;
 import org.junit.After;

 import java.io.IOException;
@@ -132,45 +140,45 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
     public void testCreateJob() throws Exception {
         RestHighLevelClient client = highLevelClient();

-        //tag::x-pack-ml-put-job-detector
+        // tag::put-job-detector
         Detector.Builder detectorBuilder = new Detector.Builder()
             .setFunction("sum") // <1>
             .setFieldName("total") // <2>
             .setDetectorDescription("Sum of total"); // <3>
-        //end::x-pack-ml-put-job-detector
+        // end::put-job-detector

-        //tag::x-pack-ml-put-job-analysis-config
+        // tag::put-job-analysis-config
         List<Detector> detectors = Collections.singletonList(detectorBuilder.build()); // <1>
         AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(detectors) // <2>
             .setBucketSpan(TimeValue.timeValueMinutes(10)); // <3>
-        //end::x-pack-ml-put-job-analysis-config
+        // end::put-job-analysis-config

-        //tag::x-pack-ml-put-job-data-description
+        // tag::put-job-data-description
         DataDescription.Builder dataDescriptionBuilder = new DataDescription.Builder()
             .setTimeField("timestamp"); // <1>
-        //end::x-pack-ml-put-job-data-description
+        // end::put-job-data-description

         {
             String id = "job_1";

-            //tag::x-pack-ml-put-job-config
+            // tag::put-job-config
             Job.Builder jobBuilder = new Job.Builder(id) // <1>
                 .setAnalysisConfig(analysisConfigBuilder) // <2>
                 .setDataDescription(dataDescriptionBuilder) // <3>
                 .setDescription("Total sum of requests"); // <4>
-            //end::x-pack-ml-put-job-config
+            // end::put-job-config

-            //tag::x-pack-ml-put-job-request
+            // tag::put-job-request
             PutJobRequest request = new PutJobRequest(jobBuilder.build()); // <1>
-            //end::x-pack-ml-put-job-request
+            // end::put-job-request

-            //tag::x-pack-ml-put-job-execute
+            // tag::put-job-execute
             PutJobResponse response = client.machineLearning().putJob(request, RequestOptions.DEFAULT);
-            //end::x-pack-ml-put-job-execute
+            // end::put-job-execute

-            //tag::x-pack-ml-put-job-response
+            // tag::put-job-response
             Date createTime = response.getResponse().getCreateTime(); // <1>
-            //end::x-pack-ml-put-job-response
+            // end::put-job-response
             assertThat(createTime.getTime(), greaterThan(0L));
         }
         {
@@ -181,7 +189,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                 .setDescription("Total sum of requests");
             PutJobRequest request = new PutJobRequest(jobBuilder.build());

-            // tag::x-pack-ml-put-job-execute-listener
+            // tag::put-job-execute-listener
             ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
                 @Override
                 public void onResponse(PutJobResponse response) {
@@ -193,15 +201,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            // end::x-pack-ml-put-job-execute-listener
+            // end::put-job-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-put-job-execute-async
+            // tag::put-job-execute-async
             client.machineLearning().putJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-put-job-execute-async
+            // end::put-job-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
@@ -217,17 +225,19 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-ml-get-job-request
+            // tag::get-job-request
             GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*"); // <1>
             request.setAllowNoJobs(true); // <2>
-            //end::x-pack-ml-get-job-request
+            // end::get-job-request

-            //tag::x-pack-ml-get-job-execute
+            // tag::get-job-execute
             GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT);
+            // end::get-job-execute
+
+            // tag::get-job-response
             long numberOfJobs = response.count(); // <1>
             List<Job> jobs = response.jobs(); // <2>
-            //end::x-pack-ml-get-job-execute
-
+            // end::get-job-response
             assertEquals(2, response.count());
             assertThat(response.jobs(), hasSize(2));
             assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()),
@@ -236,7 +246,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         {
             GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*");

-            // tag::x-pack-ml-get-job-listener
+            // tag::get-job-execute-listener
             ActionListener<GetJobResponse> listener = new ActionListener<GetJobResponse>() {
                 @Override
                 public void onResponse(GetJobResponse response) {
@@ -248,15 +258,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            // end::x-pack-ml-get-job-listener
+            // end::get-job-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-get-job-execute-async
+            // tag::get-job-execute-async
             client.machineLearning().getJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-get-job-execute-async
+            // end::get-job-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
@@ -274,21 +284,35 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-delete-ml-job-request
-            DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job");
-            deleteJobRequest.setForce(false); // <1>
-            AcknowledgedResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
-            //end::x-pack-delete-ml-job-request
+            //tag::delete-job-request
+            DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job"); // <1>
+            //end::delete-job-request

-            //tag::x-pack-delete-ml-job-response
-            boolean isAcknowledged = deleteJobResponse.isAcknowledged(); // <1>
-            //end::x-pack-delete-ml-job-response
+            //tag::delete-job-request-force
+            deleteJobRequest.setForce(false); // <1>
+            //end::delete-job-request-force
+
+            //tag::delete-job-request-wait-for-completion
+            deleteJobRequest.setWaitForCompletion(true); // <1>
+            //end::delete-job-request-wait-for-completion
+
+            //tag::delete-job-execute
+            DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
+            //end::delete-job-execute
+
+            //tag::delete-job-response
+            Boolean isAcknowledged = deleteJobResponse.getAcknowledged(); // <1>
+            TaskId task = deleteJobResponse.getTask(); // <2>
+            //end::delete-job-response
+
+            assertTrue(isAcknowledged);
+            assertNull(task);
         }
         {
-            //tag::x-pack-delete-ml-job-request-listener
-            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
+            //tag::delete-job-execute-listener
+            ActionListener<DeleteJobResponse> listener = new ActionListener<DeleteJobResponse>() {
                 @Override
-                public void onResponse(AcknowledgedResponse acknowledgedResponse) {
+                public void onResponse(DeleteJobResponse deleteJobResponse) {
                     // <1>
                 }

@@ -297,16 +321,16 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            //end::x-pack-delete-ml-job-request-listener
+            // end::delete-job-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            //tag::x-pack-delete-ml-job-request-async
             DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-second-machine-learning-job");
+            // tag::delete-job-execute-async
             client.machineLearning().deleteJobAsync(deleteJobRequest, RequestOptions.DEFAULT, listener); // <1>
-            //end::x-pack-delete-ml-job-request-async
+            // end::delete-job-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
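The hunk above replaces the old `AcknowledgedResponse` with a dedicated `DeleteJobResponse` that can also carry a `TaskId`. A sketch of the two modes, grounded in what the hunk shows: the test asserts that with `setWaitForCompletion(true)` the response is acknowledged and the task is null; the inverse behaviour when not waiting is an assumption here, not something the diff demonstrates:

```java
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.tasks.TaskId;

public class DeleteJobSketch {
    static void deleteJob(RestHighLevelClient client, String jobId) throws IOException {
        DeleteJobRequest request = new DeleteJobRequest(jobId);
        request.setForce(false);             // refuse to delete a job that is still open
        request.setWaitForCompletion(false); // hand back a task instead of blocking

        DeleteJobResponse response = client.machineLearning()
            .deleteJob(request, RequestOptions.DEFAULT);

        Boolean acknowledged = response.getAcknowledged(); // non-null when the call waited
        TaskId task = response.getTask();                  // assumed non-null when it did not
    }
}
```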
@@ -322,19 +346,21 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-ml-open-job-request
+            // tag::open-job-request
             OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-first-machine-learning-job"); // <1>
             openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <2>
-            //end::x-pack-ml-open-job-request
+            // end::open-job-request

-            //tag::x-pack-ml-open-job-execute
+            // tag::open-job-execute
             OpenJobResponse openJobResponse = client.machineLearning().openJob(openJobRequest, RequestOptions.DEFAULT);
-            boolean isOpened = openJobResponse.isOpened(); // <1>
-            //end::x-pack-ml-open-job-execute
+            // end::open-job-execute
+
+            // tag::open-job-response
+            boolean isOpened = openJobResponse.isOpened(); // <1>
+            // end::open-job-response
         }
         {
-            //tag::x-pack-ml-open-job-listener
+            // tag::open-job-execute-listener
             ActionListener<OpenJobResponse> listener = new ActionListener<OpenJobResponse>() {
                 @Override
                 public void onResponse(OpenJobResponse openJobResponse) {
@@ -346,15 +372,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            //end::x-pack-ml-open-job-listener
+            // end::open-job-execute-listener
             OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-second-machine-learning-job");

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-open-job-execute-async
+            // tag::open-job-execute-async
             client.machineLearning().openJobAsync(openJobRequest, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-open-job-execute-async
+            // end::open-job-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
@@ -368,17 +394,20 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
         client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);

-            //tag::x-pack-ml-close-job-request
+            // tag::close-job-request
             CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-first-machine-learning-job", "otherjobs*"); // <1>
             closeJobRequest.setForce(false); // <2>
             closeJobRequest.setAllowNoJobs(true); // <3>
             closeJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <4>
-            //end::x-pack-ml-close-job-request
+            // end::close-job-request

-            //tag::x-pack-ml-close-job-execute
+            // tag::close-job-execute
             CloseJobResponse closeJobResponse = client.machineLearning().closeJob(closeJobRequest, RequestOptions.DEFAULT);
+            // end::close-job-execute
+
+            // tag::close-job-response
             boolean isClosed = closeJobResponse.isClosed(); // <1>
-            //end::x-pack-ml-close-job-execute
+            // end::close-job-response
         }
         {
@@ -386,7 +415,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
             client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
             client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);

-            //tag::x-pack-ml-close-job-listener
+            // tag::close-job-execute-listener
             ActionListener<CloseJobResponse> listener = new ActionListener<CloseJobResponse>() {
                 @Override
                 public void onResponse(CloseJobResponse closeJobResponse) {
@@ -398,16 +427,16 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            //end::x-pack-ml-close-job-listener
+            // end::close-job-execute-listener
             CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-second-machine-learning-job");

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-close-job-execute-async
+            // tag::close-job-execute-async
             client.machineLearning().closeJobAsync(closeJobRequest, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-close-job-execute-async
+            // end::close-job-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
@@ -436,13 +465,13 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
             Map<String, Object> customSettings = new HashMap<>();
             customSettings.put("custom-setting-1", "custom-value");

-            //tag::x-pack-ml-update-job-detector-options
+            // tag::update-job-detector-options
             JobUpdate.DetectorUpdate detectorUpdate = new JobUpdate.DetectorUpdate(0, // <1>
                 "detector description", // <2>
                 detectionRules); // <3>
-            //end::x-pack-ml-update-job-detector-options
+            // end::update-job-detector-options

-            //tag::x-pack-ml-update-job-options
+            // tag::update-job-options
             JobUpdate update = new JobUpdate.Builder(jobId) // <1>
                 .setDescription("My description") // <2>
                 .setAnalysisLimits(new AnalysisLimits(1000L, null)) // <3>
@@ -456,24 +485,25 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                 .setCustomSettings(customSettings) // <11>
                 .setRenormalizationWindowDays(3L) // <12>
                 .build();
-            //end::x-pack-ml-update-job-options
+            // end::update-job-options

-            //tag::x-pack-ml-update-job-request
+            // tag::update-job-request
             UpdateJobRequest updateJobRequest = new UpdateJobRequest(update); // <1>
-            //end::x-pack-ml-update-job-request
+            // end::update-job-request

-            //tag::x-pack-ml-update-job-execute
+            // tag::update-job-execute
             PutJobResponse updateJobResponse = client.machineLearning().updateJob(updateJobRequest, RequestOptions.DEFAULT);
-            //end::x-pack-ml-update-job-execute
-            //tag::x-pack-ml-update-job-response
+            // end::update-job-execute
+
+            // tag::update-job-response
             Job updatedJob = updateJobResponse.getResponse(); // <1>
-            //end::x-pack-ml-update-job-response
+            // end::update-job-response

             assertEquals(update.getDescription(), updatedJob.getDescription());
         }
         {
-            //tag::x-pack-ml-update-job-listener
+            // tag::update-job-execute-listener
             ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
                 @Override
                 public void onResponse(PutJobResponse updateJobResponse) {
@@ -485,16 +515,16 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            //end::x-pack-ml-update-job-listener
+            // end::update-job-execute-listener
             UpdateJobRequest updateJobRequest = new UpdateJobRequest(new JobUpdate.Builder(jobId).build());

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-update-job-execute-async
+            // tag::update-job-execute-async
             client.machineLearning().updateJobAsync(updateJobRequest, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-update-job-execute-async
+            // end::update-job-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
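For reference, the smallest round trip through the update-job API that the renamed tags document: build a `JobUpdate`, wrap it in an `UpdateJobRequest`, and read the updated job back from the returned `PutJobResponse`. A sketch using only calls visible in the hunk above; `my-job` is a placeholder id:

```java
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.job.config.AnalysisLimits;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.config.JobUpdate;

public class UpdateJobSketch {
    static Job updateJob(RestHighLevelClient client) throws IOException {
        JobUpdate update = new JobUpdate.Builder("my-job")
            .setDescription("My description")
            .setAnalysisLimits(new AnalysisLimits(1000L, null))
            .build();

        // The endpoint answers with the job as it looks after the update.
        PutJobResponse response = client.machineLearning()
            .updateJob(new UpdateJobRequest(update), RequestOptions.DEFAULT);
        return response.getResponse();
    }
}
```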
@@ -511,56 +541,56 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
             String id = "datafeed-1";

-            //tag::x-pack-ml-create-datafeed-config
+            // tag::put-datafeed-config
             DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder(id, jobId) // <1>
                 .setIndices("index_1", "index_2"); // <2>
-            //end::x-pack-ml-create-datafeed-config
+            // end::put-datafeed-config

             AggregatorFactories.Builder aggs = AggregatorFactories.builder();

-            //tag::x-pack-ml-create-datafeed-config-set-aggregations
+            // tag::put-datafeed-config-set-aggregations
             datafeedBuilder.setAggregations(aggs); // <1>
-            //end::x-pack-ml-create-datafeed-config-set-aggregations
+            // end::put-datafeed-config-set-aggregations

             // Clearing aggregation to avoid complex validation rules
             datafeedBuilder.setAggregations((String) null);

-            //tag::x-pack-ml-create-datafeed-config-set-chunking-config
+            // tag::put-datafeed-config-set-chunking-config
             datafeedBuilder.setChunkingConfig(ChunkingConfig.newAuto()); // <1>
-            //end::x-pack-ml-create-datafeed-config-set-chunking-config
+            // end::put-datafeed-config-set-chunking-config

-            //tag::x-pack-ml-create-datafeed-config-set-frequency
+            // tag::put-datafeed-config-set-frequency
             datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(30)); // <1>
-            //end::x-pack-ml-create-datafeed-config-set-frequency
+            // end::put-datafeed-config-set-frequency

-            //tag::x-pack-ml-create-datafeed-config-set-query
+            // tag::put-datafeed-config-set-query
             datafeedBuilder.setQuery(QueryBuilders.matchAllQuery()); // <1>
-            //end::x-pack-ml-create-datafeed-config-set-query
+            // end::put-datafeed-config-set-query

-            //tag::x-pack-ml-create-datafeed-config-set-query-delay
+            // tag::put-datafeed-config-set-query-delay
             datafeedBuilder.setQueryDelay(TimeValue.timeValueMinutes(1)); // <1>
-            //end::x-pack-ml-create-datafeed-config-set-query-delay
+            // end::put-datafeed-config-set-query-delay

             List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
-            //tag::x-pack-ml-create-datafeed-config-set-script-fields
+            // tag::put-datafeed-config-set-script-fields
             datafeedBuilder.setScriptFields(scriptFields); // <1>
-            //end::x-pack-ml-create-datafeed-config-set-script-fields
+            // end::put-datafeed-config-set-script-fields

-            //tag::x-pack-ml-create-datafeed-config-set-scroll-size
+            // tag::put-datafeed-config-set-scroll-size
             datafeedBuilder.setScrollSize(1000); // <1>
-            //end::x-pack-ml-create-datafeed-config-set-scroll-size
+            // end::put-datafeed-config-set-scroll-size

-            //tag::x-pack-ml-put-datafeed-request
+            // tag::put-datafeed-request
             PutDatafeedRequest request = new PutDatafeedRequest(datafeedBuilder.build()); // <1>
-            //end::x-pack-ml-put-datafeed-request
+            // end::put-datafeed-request

-            //tag::x-pack-ml-put-datafeed-execute
+            // tag::put-datafeed-execute
             PutDatafeedResponse response = client.machineLearning().putDatafeed(request, RequestOptions.DEFAULT);
-            //end::x-pack-ml-put-datafeed-execute
+            // end::put-datafeed-execute

-            //tag::x-pack-ml-put-datafeed-response
+            // tag::put-datafeed-response
             DatafeedConfig datafeed = response.getResponse(); // <1>
-            //end::x-pack-ml-put-datafeed-response
+            // end::put-datafeed-response
             assertThat(datafeed.getId(), equalTo("datafeed-1"));
         }
         {
@@ -574,7 +604,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
             DatafeedConfig datafeed = new DatafeedConfig.Builder(id, jobId).setIndices("index_1", "index_2").build();
             PutDatafeedRequest request = new PutDatafeedRequest(datafeed);

-            // tag::x-pack-ml-put-datafeed-execute-listener
+            // tag::put-datafeed-execute-listener
             ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
                 @Override
                 public void onResponse(PutDatafeedResponse response) {
@@ -586,15 +616,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            // end::x-pack-ml-put-datafeed-execute-listener
+            // end::put-datafeed-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-put-datafeed-execute-async
+            // tag::put-datafeed-execute-async
             client.machineLearning().putDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-put-datafeed-execute-async
+            // end::put-datafeed-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
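Condensing the put-datafeed snippets above into one place: every option the tags document hangs off the same `DatafeedConfig.Builder`. A sketch with placeholder datafeed and job ids, using only the setters shown in the hunk:

```java
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutDatafeedResponse;
import org.elasticsearch.client.ml.datafeed.ChunkingConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilders;

public class PutDatafeedSketch {
    static DatafeedConfig putDatafeed(RestHighLevelClient client) throws IOException {
        DatafeedConfig config = new DatafeedConfig.Builder("datafeed-1", "job-1")
            .setIndices("index_1", "index_2")             // where to pull data from
            .setChunkingConfig(ChunkingConfig.newAuto())  // let the server size search chunks
            .setFrequency(TimeValue.timeValueSeconds(30)) // how often to search
            .setQuery(QueryBuilders.matchAllQuery())      // which documents to feed
            .setQueryDelay(TimeValue.timeValueMinutes(1)) // lag behind real time
            .setScrollSize(1000)                          // hits per scroll page
            .build();

        PutDatafeedResponse response = client.machineLearning()
            .putDatafeed(new PutDatafeedRequest(config), RequestOptions.DEFAULT);
        return response.getResponse(); // the datafeed as the cluster stored it
    }
}
```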
@@ -610,16 +640,19 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-ml-get-datafeed-request
+            // tag::get-datafeed-request
             GetDatafeedRequest request = new GetDatafeedRequest(datafeedId); // <1>
             request.setAllowNoDatafeeds(true); // <2>
-            //end::x-pack-ml-get-datafeed-request
+            // end::get-datafeed-request

-            //tag::x-pack-ml-get-datafeed-execute
+            // tag::get-datafeed-execute
             GetDatafeedResponse response = client.machineLearning().getDatafeed(request, RequestOptions.DEFAULT);
+            // end::get-datafeed-execute
+
+            // tag::get-datafeed-response
             long numberOfDatafeeds = response.count(); // <1>
             List<DatafeedConfig> datafeeds = response.datafeeds(); // <2>
-            //end::x-pack-ml-get-datafeed-execute
+            // end::get-datafeed-response

             assertEquals(1, numberOfDatafeeds);
             assertEquals(1, datafeeds.size());
@@ -627,7 +660,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         {
             GetDatafeedRequest request = new GetDatafeedRequest(datafeedId);

-            // tag::x-pack-ml-get-datafeed-listener
+            // tag::get-datafeed-execute-listener
             ActionListener<GetDatafeedResponse> listener = new ActionListener<GetDatafeedResponse>() {
                 @Override
                 public void onResponse(GetDatafeedResponse response) {
@@ -639,15 +672,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            // end::x-pack-ml-get-datafeed-listener
+            // end::get-datafeed-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-get-datafeed-execute-async
+            // tag::get-datafeed-execute-async
             client.machineLearning().getDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-get-datafeed-execute-async
+            // end::get-datafeed-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
@@ -665,23 +698,26 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-delete-ml-datafeed-request
+            // tag::delete-datafeed-request
             DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
             deleteDatafeedRequest.setForce(false); // <1>
-            AcknowledgedResponse deleteDatafeedResponse = client.machineLearning().deleteDatafeed(
-                deleteDatafeedRequest, RequestOptions.DEFAULT);
-            //end::x-pack-delete-ml-datafeed-request
+            // end::delete-datafeed-request

-            //tag::x-pack-delete-ml-datafeed-response
+            // tag::delete-datafeed-execute
+            AcknowledgedResponse deleteDatafeedResponse = client.machineLearning().deleteDatafeed(
+                deleteDatafeedRequest, RequestOptions.DEFAULT);
+            // end::delete-datafeed-execute
+
+            // tag::delete-datafeed-response
             boolean isAcknowledged = deleteDatafeedResponse.isAcknowledged(); // <1>
-            //end::x-pack-delete-ml-datafeed-response
+            // end::delete-datafeed-response
         }

         // Recreate datafeed to allow second deletion
         client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-delete-ml-datafeed-request-listener
+            // tag::delete-datafeed-execute-listener
             ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                 @Override
                 public void onResponse(AcknowledgedResponse acknowledgedResponse) {
@@ -693,16 +729,77 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            //end::x-pack-delete-ml-datafeed-request-listener
+            // end::delete-datafeed-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            //tag::x-pack-delete-ml-datafeed-request-async
             DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
+
+            // tag::delete-datafeed-execute-async
             client.machineLearning().deleteDatafeedAsync(deleteDatafeedRequest, RequestOptions.DEFAULT, listener); // <1>
-            //end::x-pack-delete-ml-datafeed-request-async
+            // end::delete-datafeed-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
+
+    public void testPreviewDatafeed() throws Exception {
+        RestHighLevelClient client = highLevelClient();
+
+        Job job = MachineLearningIT.buildJob("preview-datafeed-job");
+        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+        String datafeedId = job.getId() + "-feed";
+        String indexName = "preview_data_2";
+        CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
+        createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long");
+        highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
+        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
+            .setTypes(Arrays.asList("doc"))
+            .setIndices(indexName)
+            .build();
+        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
+        {
+            // tag::preview-datafeed-request
+            PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId); // <1>
+            // end::preview-datafeed-request
+
+            // tag::preview-datafeed-execute
+            PreviewDatafeedResponse response = client.machineLearning().previewDatafeed(request, RequestOptions.DEFAULT);
+            // end::preview-datafeed-execute
+
+            // tag::preview-datafeed-response
+            BytesReference rawPreview = response.getPreview(); // <1>
+            List<Map<String, Object>> semiParsedPreview = response.getDataList(); // <2>
+            // end::preview-datafeed-response
+
+            assertTrue(semiParsedPreview.isEmpty());
+        }
+        {
+            PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId);
+
+            // tag::preview-datafeed-execute-listener
+            ActionListener<PreviewDatafeedResponse> listener = new ActionListener<PreviewDatafeedResponse>() {
+                @Override
+                public void onResponse(PreviewDatafeedResponse response) {
+                    // <1>
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    // <2>
+                }
+            };
+            // end::preview-datafeed-execute-listener
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::preview-datafeed-execute-async
+            client.machineLearning().previewDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            // end::preview-datafeed-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
     }
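The new preview test exercises both accessors of `PreviewDatafeedResponse`: the raw JSON the server returned and a semi-parsed list of documents. A sketch of consuming both, assuming an existing datafeed id:

```java
import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.common.bytes.BytesReference;

public class PreviewDatafeedSketch {
    static void preview(RestHighLevelClient client, String datafeedId) throws IOException {
        PreviewDatafeedResponse response = client.machineLearning()
            .previewDatafeed(new PreviewDatafeedRequest(datafeedId), RequestOptions.DEFAULT);

        BytesReference rawPreview = response.getPreview();        // raw JSON bytes
        List<Map<String, Object>> docs = response.getDataList();  // one map per document

        for (Map<String, Object> doc : docs) {
            // each map holds the fields the datafeed would post to the job
            System.out.println(doc);
        }
    }
}
```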
@@ -725,27 +822,29 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
         client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
         {
-            //tag::x-pack-ml-start-datafeed-request
+            // tag::start-datafeed-request
             StartDatafeedRequest request = new StartDatafeedRequest(datafeedId); // <1>
-            //end::x-pack-ml-start-datafeed-request
+            // end::start-datafeed-request

-            //tag::x-pack-ml-start-datafeed-request-options
+            // tag::start-datafeed-request-options
             request.setEnd("2018-08-21T00:00:00Z"); // <1>
             request.setStart("2018-08-20T00:00:00Z"); // <2>
             request.setTimeout(TimeValue.timeValueMinutes(10)); // <3>
-            //end::x-pack-ml-start-datafeed-request-options
+            // end::start-datafeed-request-options

-            //tag::x-pack-ml-start-datafeed-execute
+            // tag::start-datafeed-execute
             StartDatafeedResponse response = client.machineLearning().startDatafeed(request, RequestOptions.DEFAULT);
+            // end::start-datafeed-execute
+            // tag::start-datafeed-response
             boolean started = response.isStarted(); // <1>
-            //end::x-pack-ml-start-datafeed-execute
+            // end::start-datafeed-response

             assertTrue(started);
         }
         {
             StartDatafeedRequest request = new StartDatafeedRequest(datafeedId);

-            // tag::x-pack-ml-start-datafeed-listener
+            // tag::start-datafeed-execute-listener
             ActionListener<StartDatafeedResponse> listener = new ActionListener<StartDatafeedResponse>() {
                 @Override
                 public void onResponse(StartDatafeedResponse response) {
@@ -757,15 +856,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            // end::x-pack-ml-start-datafeed-listener
+            // end::start-datafeed-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-start-datafeed-execute-async
+            // tag::start-datafeed-execute-async
             client.machineLearning().startDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-start-datafeed-execute-async
+            // end::start-datafeed-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
@@ -775,28 +874,30 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         RestHighLevelClient client = highLevelClient();

         {
-            //tag::x-pack-ml-stop-datafeed-request
+            // tag::stop-datafeed-request
             StopDatafeedRequest request = new StopDatafeedRequest("datafeed_id1", "datafeed_id*"); // <1>
-            //end::x-pack-ml-stop-datafeed-request
+            // end::stop-datafeed-request

             request = StopDatafeedRequest.stopAllDatafeedsRequest();

-            //tag::x-pack-ml-stop-datafeed-request-options
+            // tag::stop-datafeed-request-options
             request.setAllowNoDatafeeds(true); // <1>
             request.setForce(true); // <2>
             request.setTimeout(TimeValue.timeValueMinutes(10)); // <3>
-            //end::x-pack-ml-stop-datafeed-request-options
+            // end::stop-datafeed-request-options

-            //tag::x-pack-ml-stop-datafeed-execute
+            // tag::stop-datafeed-execute
             StopDatafeedResponse response = client.machineLearning().stopDatafeed(request, RequestOptions.DEFAULT);
+            // end::stop-datafeed-execute
+            // tag::stop-datafeed-response
             boolean stopped = response.isStopped(); // <1>
-            //end::x-pack-ml-stop-datafeed-execute
+            // end::stop-datafeed-response

             assertTrue(stopped);
         }
         {
             StopDatafeedRequest request = StopDatafeedRequest.stopAllDatafeedsRequest();

-            // tag::x-pack-ml-stop-datafeed-listener
+            // tag::stop-datafeed-execute-listener
             ActionListener<StopDatafeedResponse> listener = new ActionListener<StopDatafeedResponse>() {
                 @Override
                 public void onResponse(StopDatafeedResponse response) {
@@ -808,15 +909,91 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            // end::x-pack-ml-stop-datafeed-listener
+            // end::stop-datafeed-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-stop-datafeed-execute-async
+            // tag::stop-datafeed-execute-async
             client.machineLearning().stopDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-stop-datafeed-execute-async
+            // end::stop-datafeed-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
"type=date", "total", "type=long"); + highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); + DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId1, job.getId()) + .setTypes(Arrays.asList("doc")) + .setIndices(indexName) + .build(); + client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); + + String datafeedId2 = secondJob.getId() + "-feed"; + DatafeedConfig secondDatafeed = DatafeedConfig.builder(datafeedId2, secondJob.getId()) + .setTypes(Arrays.asList("doc")) + .setIndices(indexName) + .build(); + client.machineLearning().putDatafeed(new PutDatafeedRequest(secondDatafeed), RequestOptions.DEFAULT); + + { + //tag::get-datafeed-stats-request + GetDatafeedStatsRequest request = + new GetDatafeedStatsRequest("get-machine-learning-datafeed-stats1-feed", "get-machine-learning-datafeed*"); // <1> + request.setAllowNoDatafeeds(true); // <2> + //end::get-datafeed-stats-request + + //tag::get-datafeed-stats-execute + GetDatafeedStatsResponse response = client.machineLearning().getDatafeedStats(request, RequestOptions.DEFAULT); + //end::get-datafeed-stats-execute + + //tag::get-datafeed-stats-response + long numberOfDatafeedStats = response.count(); // <1> + List datafeedStats = response.datafeedStats(); // <2> + //end::get-datafeed-stats-response + + assertEquals(2, response.count()); + assertThat(response.datafeedStats(), hasSize(2)); + assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()), + containsInAnyOrder(datafeed.getId(), secondDatafeed.getId())); + } + { + GetDatafeedStatsRequest request = new GetDatafeedStatsRequest("*"); + + // tag::get-datafeed-stats-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(GetDatafeedStatsResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-datafeed-stats-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::get-datafeed-stats-execute-async + client.machineLearning().getDatafeedStatsAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::get-datafeed-stats-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } @@ -837,66 +1014,66 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { client.index(indexRequest, RequestOptions.DEFAULT); { - // tag::x-pack-ml-get-buckets-request + // tag::get-buckets-request GetBucketsRequest request = new GetBucketsRequest(jobId); // <1> - // end::x-pack-ml-get-buckets-request + // end::get-buckets-request - // tag::x-pack-ml-get-buckets-timestamp + // tag::get-buckets-timestamp request.setTimestamp("2018-08-17T00:00:00Z"); // <1> - // end::x-pack-ml-get-buckets-timestamp + // end::get-buckets-timestamp // Set timestamp to null as it is incompatible with other args request.setTimestamp(null); - // tag::x-pack-ml-get-buckets-anomaly-score + // tag::get-buckets-anomaly-score request.setAnomalyScore(75.0); // <1> - // end::x-pack-ml-get-buckets-anomaly-score + // end::get-buckets-anomaly-score - // tag::x-pack-ml-get-buckets-desc + // tag::get-buckets-desc request.setDescending(true); // <1> - // end::x-pack-ml-get-buckets-desc + // end::get-buckets-desc - // tag::x-pack-ml-get-buckets-end + // tag::get-buckets-end request.setEnd("2018-08-21T00:00:00Z"); // <1> - // 
@@ -837,66 +1014,66 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.index(indexRequest, RequestOptions.DEFAULT);

         {
-            // tag::x-pack-ml-get-buckets-request
+            // tag::get-buckets-request
             GetBucketsRequest request = new GetBucketsRequest(jobId); // <1>
-            // end::x-pack-ml-get-buckets-request
+            // end::get-buckets-request

-            // tag::x-pack-ml-get-buckets-timestamp
+            // tag::get-buckets-timestamp
             request.setTimestamp("2018-08-17T00:00:00Z"); // <1>
-            // end::x-pack-ml-get-buckets-timestamp
+            // end::get-buckets-timestamp

             // Set timestamp to null as it is incompatible with other args
             request.setTimestamp(null);

-            // tag::x-pack-ml-get-buckets-anomaly-score
+            // tag::get-buckets-anomaly-score
             request.setAnomalyScore(75.0); // <1>
-            // end::x-pack-ml-get-buckets-anomaly-score
+            // end::get-buckets-anomaly-score

-            // tag::x-pack-ml-get-buckets-desc
+            // tag::get-buckets-desc
             request.setDescending(true); // <1>
-            // end::x-pack-ml-get-buckets-desc
+            // end::get-buckets-desc

-            // tag::x-pack-ml-get-buckets-end
+            // tag::get-buckets-end
             request.setEnd("2018-08-21T00:00:00Z"); // <1>
-            // end::x-pack-ml-get-buckets-end
+            // end::get-buckets-end

-            // tag::x-pack-ml-get-buckets-exclude-interim
+            // tag::get-buckets-exclude-interim
             request.setExcludeInterim(true); // <1>
-            // end::x-pack-ml-get-buckets-exclude-interim
+            // end::get-buckets-exclude-interim

-            // tag::x-pack-ml-get-buckets-expand
+            // tag::get-buckets-expand
             request.setExpand(true); // <1>
-            // end::x-pack-ml-get-buckets-expand
+            // end::get-buckets-expand

-            // tag::x-pack-ml-get-buckets-page
+            // tag::get-buckets-page
             request.setPageParams(new PageParams(100, 200)); // <1>
-            // end::x-pack-ml-get-buckets-page
+            // end::get-buckets-page

             // Set page params back to null so the response contains the bucket we indexed
             request.setPageParams(null);

-            // tag::x-pack-ml-get-buckets-sort
+            // tag::get-buckets-sort
             request.setSort("anomaly_score"); // <1>
-            // end::x-pack-ml-get-buckets-sort
+            // end::get-buckets-sort

-            // tag::x-pack-ml-get-buckets-start
+            // tag::get-buckets-start
             request.setStart("2018-08-01T00:00:00Z"); // <1>
-            // end::x-pack-ml-get-buckets-start
+            // end::get-buckets-start

-            // tag::x-pack-ml-get-buckets-execute
+            // tag::get-buckets-execute
             GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT);
-            // end::x-pack-ml-get-buckets-execute
+            // end::get-buckets-execute

-            // tag::x-pack-ml-get-buckets-response
+            // tag::get-buckets-response
             long count = response.count(); // <1>
             List<Bucket> buckets = response.buckets(); // <2>
-            // end::x-pack-ml-get-buckets-response
+            // end::get-buckets-response

             assertEquals(1, buckets.size());
         }
         {
             GetBucketsRequest request = new GetBucketsRequest(jobId);

-            // tag::x-pack-ml-get-buckets-listener
+            // tag::get-buckets-execute-listener
             ActionListener<GetBucketsResponse> listener =
                 new ActionListener<GetBucketsResponse>() {
                     @Override
@@ -909,15 +1086,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                         // <2>
                     }
                 };
-            // end::x-pack-ml-get-buckets-listener
+            // end::get-buckets-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-get-buckets-execute-async
+            // tag::get-buckets-execute-async
             client.machineLearning().getBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-get-buckets-execute-async
+            // end::get-buckets-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
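All the small tag blocks above configure one `GetBucketsRequest`; put together they read like a query. A sketch combining the documented setters, with the same values the test uses:

```java
import java.io.IOException;
import java.util.List;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.job.results.Bucket;
import org.elasticsearch.client.ml.job.util.PageParams;

public class GetBucketsSketch {
    static List<Bucket> anomalousBuckets(RestHighLevelClient client, String jobId) throws IOException {
        GetBucketsRequest request = new GetBucketsRequest(jobId);
        request.setStart("2018-08-01T00:00:00Z");      // lower bound on bucket time
        request.setEnd("2018-08-21T00:00:00Z");        // upper bound on bucket time
        request.setAnomalyScore(75.0);                 // only buckets scoring at least this
        request.setExcludeInterim(true);               // skip buckets that may still change
        request.setSort("anomaly_score");              // sort field
        request.setDescending(true);                   // worst buckets first
        request.setPageParams(new PageParams(0, 100)); // from / size paging

        GetBucketsResponse response = client.machineLearning()
            .getBuckets(request, RequestOptions.DEFAULT);
        return response.buckets();
    }
}
```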
@@ -935,30 +1112,30 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().openJob(new OpenJobRequest(secondJob.getId()), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-ml-flush-job-request
+            // tag::flush-job-request
             FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-first-machine-learning-job"); // <1>
-            //end::x-pack-ml-flush-job-request
+            // end::flush-job-request

-            //tag::x-pack-ml-flush-job-request-options
+            // tag::flush-job-request-options
             flushJobRequest.setCalcInterim(true); // <1>
             flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); // <2>
             flushJobRequest.setStart("2018-08-31T16:35:17+00:00"); // <3>
             flushJobRequest.setEnd("2018-08-31T16:35:27+00:00"); // <4>
             flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00"); // <5>
-            //end::x-pack-ml-flush-job-request-options
+            // end::flush-job-request-options

-            //tag::x-pack-ml-flush-job-execute
+            // tag::flush-job-execute
             FlushJobResponse flushJobResponse = client.machineLearning().flushJob(flushJobRequest, RequestOptions.DEFAULT);
-            //end::x-pack-ml-flush-job-execute
+            // end::flush-job-execute

-            //tag::x-pack-ml-flush-job-response
+            // tag::flush-job-response
             boolean isFlushed = flushJobResponse.isFlushed(); // <1>
             Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); // <2>
-            //end::x-pack-ml-flush-job-response
+            // end::flush-job-response
         }
         {
-            //tag::x-pack-ml-flush-job-listener
+            // tag::flush-job-execute-listener
             ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() {
                 @Override
                 public void onResponse(FlushJobResponse FlushJobResponse) {
@@ -970,16 +1147,16 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            //end::x-pack-ml-flush-job-listener
+            // end::flush-job-execute-listener
             FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-second-machine-learning-job");

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-flush-job-execute-async
+            // tag::flush-job-execute-async
             client.machineLearning().flushJobAsync(flushJobRequest, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-flush-job-execute-async
+            // end::flush-job-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
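Flush is only meaningful while a job is open; the tags above document its options. A sketch of a flush that calculates interim results and reads back when the last bucket was finalized:

```java
import java.io.IOException;
import java.util.Date;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;

public class FlushJobSketch {
    static Date flush(RestHighLevelClient client, String jobId) throws IOException {
        FlushJobRequest request = new FlushJobRequest(jobId);
        request.setCalcInterim(true); // compute interim results for not-yet-final buckets

        FlushJobResponse response = client.machineLearning()
            .flushJob(request, RequestOptions.DEFAULT);

        boolean flushed = response.isFlushed();
        return response.getLastFinalizedBucketEnd(); // end time of the last finalized bucket
    }
}
```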
@@ -1016,27 +1193,27 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         }, 30, TimeUnit.SECONDS);

         {
-            //tag::x-pack-ml-delete-forecast-request
+            // tag::delete-forecast-request
             DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest("deleting-forecast-for-job"); // <1>
-            //end::x-pack-ml-delete-forecast-request
+            // end::delete-forecast-request

-            //tag::x-pack-ml-delete-forecast-request-options
+            // tag::delete-forecast-request-options
             deleteForecastRequest.setForecastIds(forecastId); // <1>
             deleteForecastRequest.timeout("30s"); // <2>
             deleteForecastRequest.setAllowNoForecasts(true); // <3>
-            //end::x-pack-ml-delete-forecast-request-options
+            // end::delete-forecast-request-options

-            //tag::x-pack-ml-delete-forecast-execute
+            // tag::delete-forecast-execute
             AcknowledgedResponse deleteForecastResponse = client.machineLearning().deleteForecast(deleteForecastRequest, RequestOptions.DEFAULT);
-            //end::x-pack-ml-delete-forecast-execute
+            // end::delete-forecast-execute

-            //tag::x-pack-ml-delete-forecast-response
+            // tag::delete-forecast-response
             boolean isAcknowledged = deleteForecastResponse.isAcknowledged(); // <1>
-            //end::x-pack-ml-delete-forecast-response
+            // end::delete-forecast-response
         }
         {
-            //tag::x-pack-ml-delete-forecast-listener
+            // tag::delete-forecast-execute-listener
             ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                 @Override
                 public void onResponse(AcknowledgedResponse DeleteForecastResponse) {
@@ -1048,7 +1225,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            //end::x-pack-ml-delete-forecast-listener
+            // end::delete-forecast-execute-listener
             DeleteForecastRequest deleteForecastRequest = DeleteForecastRequest.deleteAllForecasts(job.getId());
             deleteForecastRequest.setAllowNoForecasts(true);

@@ -1056,9 +1233,9 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-delete-forecast-execute-async
+            // tag::delete-forecast-execute-async
             client.machineLearning().deleteForecastAsync(deleteForecastRequest, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-delete-forecast-execute-async
+            // end::delete-forecast-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
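Two ways to target forecasts show up in the hunks above: by id, or wholesale via the `deleteAllForecasts` factory. A sketch of both, with placeholder job and forecast ids; the `AcknowledgedResponse` import follows the 6.x layout this test file uses:

```java
import java.io.IOException;

import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.DeleteForecastRequest;

public class DeleteForecastSketch {
    static boolean deleteOne(RestHighLevelClient client, String jobId, String forecastId) throws IOException {
        DeleteForecastRequest request = new DeleteForecastRequest(jobId);
        request.setForecastIds(forecastId); // restrict the delete to specific forecasts
        request.timeout("30s");             // how long to wait for the delete
        request.setAllowNoForecasts(true);  // do not fail if nothing matches

        AcknowledgedResponse response = client.machineLearning()
            .deleteForecast(request, RequestOptions.DEFAULT);
        return response.isAcknowledged();
    }

    static void deleteAll(RestHighLevelClient client, String jobId) throws IOException {
        // Factory form used by the async half of the test above.
        DeleteForecastRequest request = DeleteForecastRequest.deleteAllForecasts(jobId);
        request.setAllowNoForecasts(true);
        client.machineLearning().deleteForecast(request, RequestOptions.DEFAULT);
    }
}
```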
@@ -1074,19 +1251,19 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-ml-get-job-stats-request
+            // tag::get-job-stats-request
             GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); // <1>
             request.setAllowNoJobs(true); // <2>
-            //end::x-pack-ml-get-job-stats-request
+            // end::get-job-stats-request

-            //tag::x-pack-ml-get-job-stats-execute
+            // tag::get-job-stats-execute
             GetJobStatsResponse response = client.machineLearning().getJobStats(request, RequestOptions.DEFAULT);
-            //end::x-pack-ml-get-job-stats-execute
+            // end::get-job-stats-execute

-            //tag::x-pack-ml-get-job-stats-response
+            // tag::get-job-stats-response
             long numberOfJobStats = response.count(); // <1>
             List<JobStats> jobStats = response.jobStats(); // <2>
-            //end::x-pack-ml-get-job-stats-response
+            // end::get-job-stats-response

             assertEquals(2, response.count());
             assertThat(response.jobStats(), hasSize(2));
@@ -1096,7 +1273,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         {
             GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*");

-            // tag::x-pack-ml-get-job-stats-listener
+            // tag::get-job-stats-execute-listener
             ActionListener<GetJobStatsResponse> listener = new ActionListener<GetJobStatsResponse>() {
                 @Override
                 public void onResponse(GetJobStatsResponse response) {
@@ -1108,15 +1285,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            // end::x-pack-ml-get-job-stats-listener
+            // end::get-job-stats-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-get-job-stats-execute-async
+            // tag::get-job-stats-execute-async
             client.machineLearning().getJobStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-get-job-stats-execute-async
+            // end::get-job-stats-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
@@ -1141,28 +1318,28 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-ml-forecast-job-request
+            // tag::forecast-job-request
             ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); // <1>
-            //end::x-pack-ml-forecast-job-request
+            // end::forecast-job-request

-            //tag::x-pack-ml-forecast-job-request-options
+            // tag::forecast-job-request-options
             forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48)); // <1>
             forecastJobRequest.setDuration(TimeValue.timeValueHours(24)); // <2>
-            //end::x-pack-ml-forecast-job-request-options
+            // end::forecast-job-request-options

-            //tag::x-pack-ml-forecast-job-execute
+            // tag::forecast-job-execute
             ForecastJobResponse forecastJobResponse = client.machineLearning().forecastJob(forecastJobRequest, RequestOptions.DEFAULT);
-            //end::x-pack-ml-forecast-job-execute
+            // end::forecast-job-execute

-            //tag::x-pack-ml-forecast-job-response
+            // tag::forecast-job-response
             boolean isAcknowledged = forecastJobResponse.isAcknowledged(); // <1>
             String forecastId = forecastJobResponse.getForecastId(); // <2>
-            //end::x-pack-ml-forecast-job-response
+            // end::forecast-job-response
             assertTrue(isAcknowledged);
             assertNotNull(forecastId);
         }
         {
-            //tag::x-pack-ml-forecast-job-listener
+            // tag::forecast-job-execute-listener
             ActionListener<ForecastJobResponse> listener = new ActionListener<ForecastJobResponse>() {
                 @Override
                 public void onResponse(ForecastJobResponse forecastJobResponse) {
@@ -1174,16 +1351,16 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            //end::x-pack-ml-forecast-job-listener
+            // end::forecast-job-execute-listener
             ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job");

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-forecast-job-execute-async
+            // tag::forecast-job-execute-async
             client.machineLearning().forecastJobAsync(forecastJobRequest, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-forecast-job-execute-async
+            // end::forecast-job-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
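Forecasting returns an id that later feeds the delete-forecast API above; duration and expiry are the two knobs the tags document. A sketch:

```java
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.common.unit.TimeValue;

public class ForecastJobSketch {
    static String forecast(RestHighLevelClient client, String jobId) throws IOException {
        ForecastJobRequest request = new ForecastJobRequest(jobId);
        request.setDuration(TimeValue.timeValueHours(24));  // how far into the future to predict
        request.setExpiresIn(TimeValue.timeValueHours(48)); // how long to keep the results

        ForecastJobResponse response = client.machineLearning()
            .forecastJob(request, RequestOptions.DEFAULT);
        return response.getForecastId(); // handle for retrieving or deleting the forecast
    }
}
```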
@@ -1219,42 +1396,42 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.bulk(bulkRequest, RequestOptions.DEFAULT);

         {
-            // tag::x-pack-ml-get-overall-buckets-request
+            // tag::get-overall-buckets-request
             GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2); // <1>
-            // end::x-pack-ml-get-overall-buckets-request
+            // end::get-overall-buckets-request

-            // tag::x-pack-ml-get-overall-buckets-bucket-span
+            // tag::get-overall-buckets-bucket-span
             request.setBucketSpan(TimeValue.timeValueHours(24)); // <1>
-            // end::x-pack-ml-get-overall-buckets-bucket-span
+            // end::get-overall-buckets-bucket-span

-            // tag::x-pack-ml-get-overall-buckets-end
+            // tag::get-overall-buckets-end
             request.setEnd("2018-08-21T00:00:00Z"); // <1>
-            // end::x-pack-ml-get-overall-buckets-end
+            // end::get-overall-buckets-end

-            // tag::x-pack-ml-get-overall-buckets-exclude-interim
+            // tag::get-overall-buckets-exclude-interim
             request.setExcludeInterim(true); // <1>
-            // end::x-pack-ml-get-overall-buckets-exclude-interim
+            // end::get-overall-buckets-exclude-interim

-            // tag::x-pack-ml-get-overall-buckets-overall-score
+            // tag::get-overall-buckets-overall-score
             request.setOverallScore(75.0); // <1>
-            // end::x-pack-ml-get-overall-buckets-overall-score
+            // end::get-overall-buckets-overall-score

-            // tag::x-pack-ml-get-overall-buckets-start
+            // tag::get-overall-buckets-start
             request.setStart("2018-08-01T00:00:00Z"); // <1>
-            // end::x-pack-ml-get-overall-buckets-start
+            // end::get-overall-buckets-start

-            // tag::x-pack-ml-get-overall-buckets-top-n
+            // tag::get-overall-buckets-top-n
             request.setTopN(2); // <1>
-            // end::x-pack-ml-get-overall-buckets-top-n
+            // end::get-overall-buckets-top-n

-            // tag::x-pack-ml-get-overall-buckets-execute
+            // tag::get-overall-buckets-execute
             GetOverallBucketsResponse response = client.machineLearning().getOverallBuckets(request, RequestOptions.DEFAULT);
-            // end::x-pack-ml-get-overall-buckets-execute
+            // end::get-overall-buckets-execute

-            // tag::x-pack-ml-get-overall-buckets-response
+            // tag::get-overall-buckets-response
             long count = response.count(); // <1>
             List<OverallBucket> overallBuckets = response.overallBuckets(); // <2>
-            // end::x-pack-ml-get-overall-buckets-response
+            // end::get-overall-buckets-response

             assertEquals(1, overallBuckets.size());
             assertThat(overallBuckets.get(0).getOverallScore(), is(closeTo(80.0, 0.001)));
@@ -1263,7 +1440,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         {
             GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2);

-            // tag::x-pack-ml-get-overall-buckets-listener
+            // tag::get-overall-buckets-execute-listener
             ActionListener<GetOverallBucketsResponse> listener =
                 new ActionListener<GetOverallBucketsResponse>() {
                     @Override
@@ -1276,15 +1453,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                         // <2>
                     }
                 };
-            // end::x-pack-ml-get-overall-buckets-listener
+            // end::get-overall-buckets-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-get-overall-buckets-execute-async
+            // tag::get-overall-buckets-execute-async
             client.machineLearning().getOverallBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-get-overall-buckets-execute-async
+            // end::get-overall-buckets-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
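Overall buckets aggregate anomaly scores across several jobs, which is why the request takes multiple job ids. A sketch using the options the test sets:

```java
import java.io.IOException;
import java.util.List;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetOverallBucketsResponse;
import org.elasticsearch.client.ml.job.results.OverallBucket;
import org.elasticsearch.common.unit.TimeValue;

public class OverallBucketsSketch {
    static List<OverallBucket> overallBuckets(RestHighLevelClient client) throws IOException {
        GetOverallBucketsRequest request = new GetOverallBucketsRequest("job-1", "job-2");
        request.setBucketSpan(TimeValue.timeValueHours(24)); // span of each overall bucket
        request.setTopN(2);              // combine the top N job scores per bucket
        request.setOverallScore(75.0);   // only buckets scoring at least this
        request.setExcludeInterim(true); // ignore buckets that may still change

        GetOverallBucketsResponse response = client.machineLearning()
            .getOverallBuckets(request, RequestOptions.DEFAULT);
        return response.overallBuckets();
    }
}
```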
@@ -1305,55 +1482,55 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.index(indexRequest, RequestOptions.DEFAULT);

         {
-            // tag::x-pack-ml-get-records-request
+            // tag::get-records-request
             GetRecordsRequest request = new GetRecordsRequest(jobId); // <1>
-            // end::x-pack-ml-get-records-request
+            // end::get-records-request

-            // tag::x-pack-ml-get-records-desc
+            // tag::get-records-desc
             request.setDescending(true); // <1>
-            // end::x-pack-ml-get-records-desc
+            // end::get-records-desc

-            // tag::x-pack-ml-get-records-end
+            // tag::get-records-end
             request.setEnd("2018-08-21T00:00:00Z"); // <1>
-            // end::x-pack-ml-get-records-end
+            // end::get-records-end

-            // tag::x-pack-ml-get-records-exclude-interim
+            // tag::get-records-exclude-interim
             request.setExcludeInterim(true); // <1>
-            // end::x-pack-ml-get-records-exclude-interim
+            // end::get-records-exclude-interim

-            // tag::x-pack-ml-get-records-page
+            // tag::get-records-page
             request.setPageParams(new PageParams(100, 200)); // <1>
-            // end::x-pack-ml-get-records-page
+            // end::get-records-page

             // Set page params back to null so the response contains the record we indexed
             request.setPageParams(null);

-            // tag::x-pack-ml-get-records-record-score
+            // tag::get-records-record-score
             request.setRecordScore(75.0); // <1>
-            // end::x-pack-ml-get-records-record-score
+            // end::get-records-record-score

-            // tag::x-pack-ml-get-records-sort
+            // tag::get-records-sort
             request.setSort("probability"); // <1>
-            // end::x-pack-ml-get-records-sort
+            // end::get-records-sort

-            // tag::x-pack-ml-get-records-start
+            // tag::get-records-start
             request.setStart("2018-08-01T00:00:00Z"); // <1>
-            // end::x-pack-ml-get-records-start
+            // end::get-records-start

-            // tag::x-pack-ml-get-records-execute
+            // tag::get-records-execute
             GetRecordsResponse response = client.machineLearning().getRecords(request, RequestOptions.DEFAULT);
-            // end::x-pack-ml-get-records-execute
+            // end::get-records-execute

-            // tag::x-pack-ml-get-records-response
+            // tag::get-records-response
             long count = response.count(); // <1>
             List<AnomalyRecord> records = response.records(); // <2>
-            // end::x-pack-ml-get-records-response
+            // end::get-records-response

             assertEquals(1, records.size());
         }
         {
             GetRecordsRequest request = new GetRecordsRequest(jobId);

-            // tag::x-pack-ml-get-records-listener
+            // tag::get-records-execute-listener
             ActionListener<GetRecordsResponse> listener =
                 new ActionListener<GetRecordsResponse>() {
                     @Override
@@ -1366,15 +1543,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                         // <2>
                     }
                 };
-            // end::x-pack-ml-get-records-listener
+            // end::get-records-execute-listener

             // Replace the empty listener by a blocking listener in test
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-get-records-execute-async
+            // tag::get-records-execute-async
             client.machineLearning().getRecordsAsync(request, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-get-records-execute-async
+            // end::get-records-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
@@ -1388,35 +1565,35 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);

         {
-            //tag::x-pack-ml-post-data-request
+            // tag::post-data-request
             PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); // <1>
             Map<String, Object> mapData = new HashMap<>();
             mapData.put("total", 109);
             jsonBuilder.addDoc(mapData); // <2>
             jsonBuilder.addDoc("{\"total\":1000}"); // <3>
             PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <4>
-            //end::x-pack-ml-post-data-request
+            // end::post-data-request

-            //tag::x-pack-ml-post-data-request-options
+            // tag::post-data-request-options
             postDataRequest.setResetStart("2018-08-31T16:35:07+00:00"); // <1>
             postDataRequest.setResetEnd("2018-08-31T16:35:17+00:00"); // <2>
-            //end::x-pack-ml-post-data-request-options
+            // end::post-data-request-options
             postDataRequest.setResetEnd(null);
             postDataRequest.setResetStart(null);

-            //tag::x-pack-ml-post-data-execute
+            // tag::post-data-execute
             PostDataResponse postDataResponse = client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
-            //end::x-pack-ml-post-data-execute
+            // end::post-data-execute

-            //tag::x-pack-ml-post-data-response
+            // tag::post-data-response
             DataCounts dataCounts = postDataResponse.getDataCounts(); // <1>
-            //end::x-pack-ml-post-data-response
+            // end::post-data-response

             assertEquals(2, dataCounts.getInputRecordCount());
         }
         {
-            //tag::x-pack-ml-post-data-listener
+            // tag::post-data-execute-listener
             ActionListener<PostDataResponse> listener = new ActionListener<PostDataResponse>() {
                 @Override
                 public void onResponse(PostDataResponse postDataResponse) {
@@ -1428,7 +1605,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
                     // <2>
                 }
             };
-            //end::x-pack-ml-post-data-listener
+            // end::post-data-execute-listener
             PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
             Map<String, Object> mapData = new HashMap<>();
             mapData.put("total", 109);
@@ -1439,9 +1616,9 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);

-            // tag::x-pack-ml-post-data-execute-async
+            // tag::post-data-execute-async
             client.machineLearning().postDataAsync(postDataRequest, RequestOptions.DEFAULT, listener); // <1>
-            // end::x-pack-ml-post-data-execute-async
+            // end::post-data-execute-async

             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
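`PostDataRequest.JsonBuilder` accepts documents either as maps or as pre-serialized JSON strings, which is exactly what the renamed snippet demonstrates. A sketch that posts two documents and reads back the count; the `DataCounts` import path follows the 6.x client layout this test file uses:

```java
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.job.process.DataCounts;

public class PostDataSketch {
    static long postTwoDocs(RestHighLevelClient client, String jobId) throws IOException {
        PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();

        Map<String, Object> doc = new HashMap<>();
        doc.put("total", 109);
        builder.addDoc(doc);                 // documents can be maps...
        builder.addDoc("{\"total\":1000}");  // ...or raw JSON strings

        PostDataResponse response = client.machineLearning()
            .postData(new PostDataRequest(jobId, builder), RequestOptions.DEFAULT);

        DataCounts counts = response.getDataCounts();
        return counts.getInputRecordCount(); // should be 2 for this payload
    }
}
```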
tag::x-pack-ml-get-influencers-desc + // tag::get-influencers-desc request.setDescending(true); // <1> - // end::x-pack-ml-get-influencers-desc + // end::get-influencers-desc - // tag::x-pack-ml-get-influencers-end + // tag::get-influencers-end request.setEnd("2018-08-21T00:00:00Z"); // <1> - // end::x-pack-ml-get-influencers-end + // end::get-influencers-end - // tag::x-pack-ml-get-influencers-exclude-interim + // tag::get-influencers-exclude-interim request.setExcludeInterim(true); // <1> - // end::x-pack-ml-get-influencers-exclude-interim + // end::get-influencers-exclude-interim - // tag::x-pack-ml-get-influencers-influencer-score + // tag::get-influencers-influencer-score request.setInfluencerScore(75.0); // <1> - // end::x-pack-ml-get-influencers-influencer-score + // end::get-influencers-influencer-score - // tag::x-pack-ml-get-influencers-page + // tag::get-influencers-page request.setPageParams(new PageParams(100, 200)); // <1> - // end::x-pack-ml-get-influencers-page + // end::get-influencers-page // Set page params back to null so the response contains the influencer we indexed request.setPageParams(null); - // tag::x-pack-ml-get-influencers-sort + // tag::get-influencers-sort request.setSort("probability"); // <1> - // end::x-pack-ml-get-influencers-sort + // end::get-influencers-sort - // tag::x-pack-ml-get-influencers-start + // tag::get-influencers-start request.setStart("2018-08-01T00:00:00Z"); // <1> - // end::x-pack-ml-get-influencers-start + // end::get-influencers-start - // tag::x-pack-ml-get-influencers-execute + // tag::get-influencers-execute GetInfluencersResponse response = client.machineLearning().getInfluencers(request, RequestOptions.DEFAULT); - // end::x-pack-ml-get-influencers-execute + // end::get-influencers-execute - // tag::x-pack-ml-get-influencers-response + // tag::get-influencers-response long count = response.count(); // <1> List influencers = response.influencers(); // <2> - // end::x-pack-ml-get-influencers-response + // end::get-influencers-response assertEquals(1, influencers.size()); } { GetInfluencersRequest request = new GetInfluencersRequest(jobId); - // tag::x-pack-ml-get-influencers-listener + // tag::get-influencers-execute-listener ActionListener listener = new ActionListener() { @Override @@ -1524,15 +1701,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { // <2> } }; - // end::x-pack-ml-get-influencers-listener + // end::get-influencers-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - // tag::x-pack-ml-get-influencers-execute-async + // tag::get-influencers-execute-async client.machineLearning().getInfluencersAsync(request, RequestOptions.DEFAULT, listener); // <1> - // end::x-pack-ml-get-influencers-execute-async + // end::get-influencers-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } @@ -1553,35 +1730,35 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { client.index(indexRequest, RequestOptions.DEFAULT); { - // tag::x-pack-ml-get-categories-request + // tag::get-categories-request GetCategoriesRequest request = new GetCategoriesRequest(jobId); // <1> - // end::x-pack-ml-get-categories-request + // end::get-categories-request - // tag::x-pack-ml-get-categories-category-id + // tag::get-categories-category-id request.setCategoryId(1L); // <1> - // end::x-pack-ml-get-categories-category-id + // 
end::get-categories-category-id - // tag::x-pack-ml-get-categories-page + // tag::get-categories-page request.setPageParams(new PageParams(100, 200)); // <1> - // end::x-pack-ml-get-categories-page + // end::get-categories-page // Set page params back to null so the response contains the category we indexed request.setPageParams(null); - // tag::x-pack-ml-get-categories-execute + // tag::get-categories-execute GetCategoriesResponse response = client.machineLearning().getCategories(request, RequestOptions.DEFAULT); - // end::x-pack-ml-get-categories-execute + // end::get-categories-execute - // tag::x-pack-ml-get-categories-response + // tag::get-categories-response long count = response.count(); // <1> List categories = response.categories(); // <2> - // end::x-pack-ml-get-categories-response + // end::get-categories-response assertEquals(1, categories.size()); } { GetCategoriesRequest request = new GetCategoriesRequest(jobId); - // tag::x-pack-ml-get-categories-listener + // tag::get-categories-execute-listener ActionListener listener = new ActionListener() { @Override @@ -1594,15 +1771,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { // <2> } }; - // end::x-pack-ml-get-categories-listener + // end::get-categories-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - // tag::x-pack-ml-get-categories-execute-async + // tag::get-categories-execute-async client.machineLearning().getCategoriesAsync(request, RequestOptions.DEFAULT, listener); // <1> - // end::x-pack-ml-get-categories-execute-async + // end::get-categories-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } @@ -1611,21 +1788,21 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { public void testPutCalendar() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); - //tag::x-pack-ml-put-calendar-request + // tag::put-calendar-request Calendar calendar = new Calendar("public_holidays", Collections.singletonList("job_1"), "A calendar for public holidays"); PutCalendarRequest request = new PutCalendarRequest(calendar); // <1> - //end::x-pack-ml-put-calendar-request + // end::put-calendar-request - //tag::x-pack-ml-put-calendar-execution + // tag::put-calendar-execute PutCalendarResponse response = client.machineLearning().putCalendar(request, RequestOptions.DEFAULT); - //end::x-pack-ml-put-calendar-execution + // end::put-calendar-execute - //tag::x-pack-ml-put-calendar-response + // tag::put-calendar-response Calendar newCalendar = response.getCalendar(); // <1> - //end::x-pack-ml-put-calendar-response + // end::put-calendar-response assertThat(newCalendar.getId(), equalTo("public_holidays")); - // tag::x-pack-ml-put-calendar-listener + // tag::put-calendar-execute-listener ActionListener listener = new ActionListener() { @Override public void onResponse(PutCalendarResponse response) { @@ -1637,15 +1814,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { // <2> } }; - // end::x-pack-ml-put-calendar-listener + // end::put-calendar-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - // tag::x-pack-ml-put-calendar-execute-async + // tag::put-calendar-execute-async client.machineLearning().putCalendarAsync(request, 
RequestOptions.DEFAULT, listener); // <1> - // end::x-pack-ml-put-calendar-execute-async + // end::put-calendar-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } @@ -1657,35 +1834,35 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { PutCalendarRequest putRequest = new PutCalendarRequest(calendar); client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT); { - //tag::x-pack-ml-get-calendars-request + // tag::get-calendars-request GetCalendarsRequest request = new GetCalendarsRequest(); // <1> - //end::x-pack-ml-get-calendars-request + // end::get-calendars-request - //tag::x-pack-ml-get-calendars-id + // tag::get-calendars-id request.setCalendarId("holidays"); // <1> - //end::x-pack-ml-get-calendars-id + // end::get-calendars-id - //tag::x-pack-ml-get-calendars-page + // tag::get-calendars-page request.setPageParams(new PageParams(10, 20)); // <1> - //end::x-pack-ml-get-calendars-page + // end::get-calendars-page // reset page params request.setPageParams(null); - //tag::x-pack-ml-get-calendars-execution + // tag::get-calendars-execute GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT); - //end::x-pack-ml-get-calendars-execution + // end::get-calendars-execute - // tag::x-pack-ml-get-calendars-response + // tag::get-calendars-response long count = response.count(); // <1> List calendars = response.calendars(); // <2> - // end::x-pack-ml-get-calendars-response + // end::get-calendars-response assertEquals(1, calendars.size()); } { GetCalendarsRequest request = new GetCalendarsRequest("holidays"); - // tag::x-pack-ml-get-calendars-listener + // tag::get-calendars-execute-listener ActionListener listener = new ActionListener() { @Override @@ -1698,15 +1875,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { // <2> } }; - // end::x-pack-ml-get-calendars-listener + // end::get-calendars-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - // tag::x-pack-ml-get-calendars-execute-async + // tag::get-calendars-execute-async client.machineLearning().getCalendarsAsync(request, RequestOptions.DEFAULT, listener); // <1> - // end::x-pack-ml-get-calendars-execute-async + // end::get-calendars-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } @@ -1719,21 +1896,21 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { PutCalendarRequest putCalendarRequest = new PutCalendarRequest(calendar); client.machineLearning().putCalendar(putCalendarRequest, RequestOptions.DEFAULT); - //tag::x-pack-ml-delete-calendar-request + // tag::delete-calendar-request DeleteCalendarRequest request = new DeleteCalendarRequest("holidays"); // <1> - //end::x-pack-ml-delete-calendar-request + // end::delete-calendar-request - //tag::x-pack-ml-delete-calendar-execute + // tag::delete-calendar-execute AcknowledgedResponse response = client.machineLearning().deleteCalendar(request, RequestOptions.DEFAULT); - //end::x-pack-ml-delete-calendar-execute + // end::delete-calendar-execute - //tag::x-pack-ml-delete-calendar-response + // tag::delete-calendar-response boolean isAcknowledged = response.isAcknowledged(); // <1> - //end::x-pack-ml-delete-calendar-response + // end::delete-calendar-response assertTrue(isAcknowledged); - // tag::x-pack-ml-delete-calendar-listener + // tag::delete-calendar-execute-listener ActionListener 
listener = new ActionListener() { @Override public void onResponse(AcknowledgedResponse response) { @@ -1745,15 +1922,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { // <2> } }; - // end::x-pack-ml-delete-calendar-listener + // end::delete-calendar-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - // tag::x-pack-ml-delete-calendar-execute-async + // tag::delete-calendar-execute-async client.machineLearning().deleteCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1> - // end::x-pack-ml-delete-calendar-execute-async + // end::delete-calendar-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java index 48052f86a00..4f41b723943 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java @@ -25,6 +25,8 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.watcher.ActivateWatchRequest; +import org.elasticsearch.client.watcher.ActivateWatchResponse; import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.client.watcher.AckWatchResponse; import org.elasticsearch.client.watcher.ActionStatus; @@ -160,9 +162,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { } { - //tag::ack-watch-execute + //tag::ack-watch-request AckWatchRequest request = new AckWatchRequest("my_watch_id", // <1> "logme", "emailme"); // <2> + //end::ack-watch-request + + //tag::ack-watch-execute AckWatchResponse response = client.watcher().ackWatch(request, RequestOptions.DEFAULT); //end::ack-watch-execute @@ -203,4 +208,60 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { } } + public void testActivateWatch() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + BytesReference watch = new BytesArray("{ \n" + + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + + " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" + + " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" + + "}"); + PutWatchRequest request = new PutWatchRequest("my_watch_id", watch, XContentType.JSON); + request.setActive(false); // <1> + PutWatchResponse response = client.watcher().putWatch(request, RequestOptions.DEFAULT); + } + + { + //tag::activate-watch-request + ActivateWatchRequest request = new ActivateWatchRequest("my_watch_id"); + ActivateWatchResponse response = client.watcher().activateWatch(request, RequestOptions.DEFAULT); + //end::activate-watch-request + + //tag::activate-watch-response + WatchStatus watchStatus = response.getStatus(); // <1> + //end::activate-watch-response + + assertTrue(watchStatus.state().isActive()); + } + + { + ActivateWatchRequest request = new ActivateWatchRequest("my_watch_id"); + //tag::activate-watch-request-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(ActivateWatchResponse 
response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::activate-watch-request-listener + + //Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + //tag::activate-watch-request-async + client.watcher().activateWatchAsync(request, RequestOptions.DEFAULT, listener); // <1> + //end::activate-watch-request-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + + } + } + } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobRequestTests.java index d3ccb98eeb6..d9f96fd0f28 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobRequestTests.java @@ -34,12 +34,4 @@ public class DeleteJobRequestTests extends ESTestCase { ex = expectThrows(NullPointerException.class, () -> createTestInstance().setJobId(null)); assertEquals("[job_id] must not be null", ex.getMessage()); } - - public void test_WithForce() { - DeleteJobRequest deleteJobRequest = createTestInstance(); - assertFalse(deleteJobRequest.isForce()); - - deleteJobRequest.setForce(true); - assertTrue(deleteJobRequest.isForce()); - } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobResponseTests.java new file mode 100644 index 00000000000..97a8c5b892c --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobResponseTests.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
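The ML and Watcher documentation tests above all repeat one asynchronous pattern: an empty `ActionListener` is declared for the docs, then wrapped in a `LatchedActionListener` so the test thread can block until the callback fires. Condensed into a minimal sketch, using the same client classes that appear above and assuming the imports and setup of the surrounding test class (the `GetRecordsRequest` variant is just one representative):

["source","java"]
--------------------------------------------------
// Listener the docs show to readers: handle success or failure.
ActionListener<GetRecordsResponse> listener = new ActionListener<GetRecordsResponse>() {
    @Override
    public void onResponse(GetRecordsResponse response) {
        // handle the records
    }

    @Override
    public void onFailure(Exception e) {
        // handle the failure
    }
};

// Test-only wrapper: counts down the latch when either callback runs,
// so the test can wait for the async call to complete.
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);

client.machineLearning().getRecordsAsync(request, RequestOptions.DEFAULT, listener);
assertTrue(latch.await(30L, TimeUnit.SECONDS));
--------------------------------------------------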
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class DeleteJobResponseTests extends AbstractXContentTestCase { + + @Override + protected DeleteJobResponse createTestInstance() { + if (randomBoolean()) { + return new DeleteJobResponse(randomBoolean(), null); + } + return new DeleteJobResponse(null, new TaskId(randomAlphaOfLength(20) + ":" + randomIntBetween(1, 100))); + } + + @Override + protected DeleteJobResponse doParseInstance(XContentParser parser) throws IOException { + return DeleteJobResponse.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetDatafeedStatsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetDatafeedStatsRequestTests.java new file mode 100644 index 00000000000..5d0e94c0e92 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetDatafeedStatsRequestTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
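The new response tests in this change extend `AbstractXContentTestCase`, which drives a serialize/parse round trip over the `createTestInstance`/`doParseInstance` pair. Reduced to a sketch, and leaving out the field shuffling and the random unknown fields the harness injects when `supportsUnknownFields()` returns true, the core check amounts to something like the method below; this is a simplification written for illustration, not the framework's actual code:

["source","java"]
--------------------------------------------------
// Simplified sketch of the round trip AbstractXContentTestCase performs,
// shown as it would look inside a subclass such as DeleteJobResponseTests.
public void testRoundTripSketch() throws IOException {
    DeleteJobResponse original = createTestInstance();
    XContentType xContentType = randomFrom(XContentType.values());
    // Serialize the instance to the chosen content type...
    BytesReference serialized = XContentHelper.toXContent(original, xContentType, false);
    try (XContentParser parser = createParser(xContentType.xContent(), serialized)) {
        // ...then parse it back and require equality, which is why these
        // classes need sensible equals()/hashCode() implementations.
        DeleteJobResponse parsed = doParseInstance(parser);
        assertEquals(original, parsed);
    }
}
--------------------------------------------------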
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class GetDatafeedStatsRequestTests extends AbstractXContentTestCase { + + public void testAllDatafeedsRequest() { + GetDatafeedStatsRequest request = GetDatafeedStatsRequest.getAllDatafeedStatsRequest(); + + assertEquals(request.getDatafeedIds().size(), 1); + assertEquals(request.getDatafeedIds().get(0), "_all"); + } + + public void testNewWithDatafeedId() { + Exception exception = expectThrows(NullPointerException.class, () -> new GetDatafeedStatsRequest("datafeed", null)); + assertEquals(exception.getMessage(), "datafeedIds must not contain null values"); + } + + @Override + protected GetDatafeedStatsRequest createTestInstance() { + int datafeedCount = randomIntBetween(0, 10); + List datafeedIds = new ArrayList<>(datafeedCount); + + for (int i = 0; i < datafeedCount; i++) { + datafeedIds.add(randomAlphaOfLength(10)); + } + + GetDatafeedStatsRequest request = new GetDatafeedStatsRequest(datafeedIds); + + if (randomBoolean()) { + request.setAllowNoDatafeeds(randomBoolean()); + } + + return request; + } + + @Override + protected GetDatafeedStatsRequest doParseInstance(XContentParser parser) throws IOException { + return GetDatafeedStatsRequest.PARSER.parse(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetDatafeedStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetDatafeedStatsResponseTests.java new file mode 100644 index 00000000000..0a0261daf20 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetDatafeedStatsResponseTests.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
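For orientation, the request class exercised above is intended to be used with the high-level client roughly as follows. Treat this as a hypothetical sketch: the `getDatafeedStats` method and the `datafeeds()` accessor are inferred from this change's naming conventions, not verified signatures.

["source","java"]
--------------------------------------------------
// Hypothetical usage of GetDatafeedStatsRequest; method and accessor
// names are assumptions inferred from the tests in this change.
GetDatafeedStatsRequest request = new GetDatafeedStatsRequest("datafeed-1", "datafeed-2");
request.setAllowNoDatafeeds(true); // don't error when the ids match no datafeeds

GetDatafeedStatsResponse response =
    client.machineLearning().getDatafeedStats(request, RequestOptions.DEFAULT);
long count = response.count();                    // number of matching datafeeds
List<DatafeedStats> stats = response.datafeeds(); // per-datafeed state and node
--------------------------------------------------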
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.datafeed.DatafeedStats; +import org.elasticsearch.client.ml.datafeed.DatafeedStatsTests; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; + +public class GetDatafeedStatsResponseTests extends AbstractXContentTestCase { + + @Override + protected GetDatafeedStatsResponse createTestInstance() { + + int count = randomIntBetween(1, 5); + List results = new ArrayList<>(count); + for(int i = 0; i < count; i++) { + results.add(DatafeedStatsTests.createRandomInstance()); + } + + return new GetDatafeedStatsResponse(results, count); + } + + @Override + protected GetDatafeedStatsResponse doParseInstance(XContentParser parser) throws IOException { + return GetDatafeedStatsResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> !field.isEmpty(); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PreviewDatafeedRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PreviewDatafeedRequestTests.java new file mode 100644 index 00000000000..2359ec6927a --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PreviewDatafeedRequestTests.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class PreviewDatafeedRequestTests extends AbstractXContentTestCase { + + @Override + protected PreviewDatafeedRequest createTestInstance() { + return new PreviewDatafeedRequest(DatafeedConfigTests.randomValidDatafeedId()); + } + + @Override + protected PreviewDatafeedRequest doParseInstance(XContentParser parser) throws IOException { + return PreviewDatafeedRequest.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PreviewDatafeedResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PreviewDatafeedResponseTests.java new file mode 100644 index 00000000000..bb0ec3e5e70 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PreviewDatafeedResponseTests.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
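The preview request tested above pairs with the response class whose tests follow. End to end, previewing a datafeed through the high-level client would look roughly like the sketch below; the `previewDatafeed` method name is an assumption based on this change's conventions, while `getDataList()` comes from the response tests that follow.

["source","java"]
--------------------------------------------------
// Hypothetical preview round trip: the response wraps the raw JSON the
// datafeed would extract, exposed as a list of maps via getDataList().
PreviewDatafeedRequest request = new PreviewDatafeedRequest("datafeed-1"); // datafeed id
PreviewDatafeedResponse response =
    client.machineLearning().previewDatafeed(request, RequestOptions.DEFAULT);
for (Map<String, Object> doc : response.getDataList()) {
    System.out.println(doc.get("airline")); // field names depend on the datafeed
}
--------------------------------------------------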
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.datafeed.DatafeedConfig; +import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.stream.Collectors; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class PreviewDatafeedResponseTests extends ESTestCase { + + protected PreviewDatafeedResponse createTestInstance() throws IOException { + //This is just to create a random object to stand in the place of random data + DatafeedConfig datafeedConfig = DatafeedConfigTests.createRandom(); + BytesReference bytes = XContentHelper.toXContent(datafeedConfig, XContentType.JSON, false); + return new PreviewDatafeedResponse(bytes); + } + + public void testGetDataList() throws IOException { + String rawData = "[\n" + + " {\n" + + " \"time\": 1454803200000,\n" + + " \"airline\": \"JZA\",\n" + + " \"doc_count\": 5,\n" + + " \"responsetime\": 990.4628295898438\n" + + " },\n" + + " {\n" + + " \"time\": 1454803200000,\n" + + " \"airline\": \"JBU\",\n" + + " \"doc_count\": 23,\n" + + " \"responsetime\": 877.5927124023438\n" + + " },\n" + + " {\n" + + " \"time\": 1454803200000,\n" + + " \"airline\": \"KLM\",\n" + + " \"doc_count\": 42,\n" + + " \"responsetime\": 1355.481201171875\n" + + " }\n" + + "]"; + BytesReference bytes = new BytesArray(rawData); + PreviewDatafeedResponse response = new PreviewDatafeedResponse(bytes); + assertThat(response.getDataList() + .stream() + .map(map -> (String)map.get("airline")) + .collect(Collectors.toList()), containsInAnyOrder("JZA", "JBU", "KLM")); + + rawData = "{\"key\":\"my_value\"}"; + bytes = new BytesArray(rawData); + response = new PreviewDatafeedResponse(bytes); + assertThat(response.getDataList() + .stream() + .map(map -> (String)map.get("key")) + .collect(Collectors.toList()), containsInAnyOrder("my_value")); + + } + + // Because this is a raw BytesReference, the shuffling done via `AbstractXContentTestCase` is unacceptable and causes equality failures + public void testSerializationDeserialization() throws IOException { + for (int runs = 0; runs < 20; runs++) { + XContentType xContentType = XContentType.JSON; + PreviewDatafeedResponse testInstance = createTestInstance(); + BytesReference originalXContent = XContentHelper.toXContent(testInstance, xContentType, false); + XContentParser parser = this.createParser(xContentType.xContent(), originalXContent); + PreviewDatafeedResponse parsed = PreviewDatafeedResponse.fromXContent(parser); + assertEquals(testInstance, parsed); + assertToXContentEquivalent( + XContentHelper.toXContent(testInstance, xContentType, false), + XContentHelper.toXContent(parsed, xContentType, false), + xContentType); + } + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedStatsTests.java new file mode 100644 index 00000000000..50c0809d201 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedStatsTests.java @@ -0,0 +1,75 @@ +/* + *
Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.datafeed; + +import org.elasticsearch.client.ml.NodeAttributes; +import org.elasticsearch.client.ml.NodeAttributesTests; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; + +public class DatafeedStatsTests extends AbstractXContentTestCase { + + public static DatafeedStats createRandomInstance() { + String datafeedId = DatafeedConfigTests.randomValidDatafeedId(); + DatafeedState datafeedState = + randomFrom(DatafeedState.STARTED, DatafeedState.STARTING, DatafeedState.STOPPED, DatafeedState.STOPPING); + NodeAttributes nodeAttributes = null; + if (randomBoolean()) { + NodeAttributes randomAttributes = NodeAttributesTests.createRandom(); + int numberOfAttributes = randomIntBetween(1, 10); + Map attributes = new HashMap<>(numberOfAttributes); + for(int i = 0; i < numberOfAttributes; i++) { + String val = randomAlphaOfLength(10); + attributes.put("ml.key-"+i, val); + } + nodeAttributes = new NodeAttributes(randomAttributes.getId(), + randomAttributes.getName(), + randomAttributes.getEphemeralId(), + randomAttributes.getTransportAddress(), + attributes); + } + String assignmentReason = randomBoolean() ? 
randomAlphaOfLength(10) : null; + return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentReason); + } + + @Override + protected DatafeedStats createTestInstance() { + return createRandomInstance(); + } + + @Override + protected DatafeedStats doParseInstance(XContentParser parser) throws IOException { + return DatafeedStats.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> field.equals("node.attributes"); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobTests.java index 1946f70a230..667932d5912 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobTests.java @@ -34,9 +34,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; -import java.util.HashMap; import java.util.List; -import java.util.Map; public class JobTests extends AbstractXContentTestCase { @@ -77,93 +75,6 @@ public class JobTests extends AbstractXContentTestCase { assertNotNull(Job.PARSER.apply(parser, null).build()); } - public void testEquals_GivenDifferentClass() { - Job job = buildJobBuilder("foo").build(); - assertFalse(job.equals("a string")); - } - - public void testEquals_GivenDifferentIds() { - Date createTime = new Date(); - Job.Builder builder = buildJobBuilder("foo"); - builder.setCreateTime(createTime); - Job job1 = builder.build(); - builder.setId("bar"); - Job job2 = builder.build(); - assertFalse(job1.equals(job2)); - } - - public void testEquals_GivenDifferentRenormalizationWindowDays() { - Date date = new Date(); - Job.Builder jobDetails1 = new Job.Builder("foo"); - jobDetails1.setDataDescription(new DataDescription.Builder()); - jobDetails1.setAnalysisConfig(createAnalysisConfig()); - jobDetails1.setRenormalizationWindowDays(3L); - jobDetails1.setCreateTime(date); - Job.Builder jobDetails2 = new Job.Builder("foo"); - jobDetails2.setDataDescription(new DataDescription.Builder()); - jobDetails2.setRenormalizationWindowDays(4L); - jobDetails2.setAnalysisConfig(createAnalysisConfig()); - jobDetails2.setCreateTime(date); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - - public void testEquals_GivenDifferentBackgroundPersistInterval() { - Date date = new Date(); - Job.Builder jobDetails1 = new Job.Builder("foo"); - jobDetails1.setDataDescription(new DataDescription.Builder()); - jobDetails1.setAnalysisConfig(createAnalysisConfig()); - jobDetails1.setBackgroundPersistInterval(TimeValue.timeValueSeconds(10000L)); - jobDetails1.setCreateTime(date); - Job.Builder jobDetails2 = new Job.Builder("foo"); - jobDetails2.setDataDescription(new DataDescription.Builder()); - jobDetails2.setBackgroundPersistInterval(TimeValue.timeValueSeconds(8000L)); - jobDetails2.setAnalysisConfig(createAnalysisConfig()); - jobDetails2.setCreateTime(date); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - - public void testEquals_GivenDifferentModelSnapshotRetentionDays() { - Date date = new Date(); - Job.Builder jobDetails1 = new Job.Builder("foo"); - jobDetails1.setDataDescription(new DataDescription.Builder()); - jobDetails1.setAnalysisConfig(createAnalysisConfig()); - 
jobDetails1.setModelSnapshotRetentionDays(10L); - jobDetails1.setCreateTime(date); - Job.Builder jobDetails2 = new Job.Builder("foo"); - jobDetails2.setDataDescription(new DataDescription.Builder()); - jobDetails2.setModelSnapshotRetentionDays(8L); - jobDetails2.setAnalysisConfig(createAnalysisConfig()); - jobDetails2.setCreateTime(date); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - - public void testEquals_GivenDifferentResultsRetentionDays() { - Date date = new Date(); - Job.Builder jobDetails1 = new Job.Builder("foo"); - jobDetails1.setDataDescription(new DataDescription.Builder()); - jobDetails1.setAnalysisConfig(createAnalysisConfig()); - jobDetails1.setCreateTime(date); - jobDetails1.setResultsRetentionDays(30L); - Job.Builder jobDetails2 = new Job.Builder("foo"); - jobDetails2.setDataDescription(new DataDescription.Builder()); - jobDetails2.setResultsRetentionDays(4L); - jobDetails2.setAnalysisConfig(createAnalysisConfig()); - jobDetails2.setCreateTime(date); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - - public void testEquals_GivenDifferentCustomSettings() { - Job.Builder jobDetails1 = buildJobBuilder("foo"); - Map customSettings1 = new HashMap<>(); - customSettings1.put("key1", "value1"); - jobDetails1.setCustomSettings(customSettings1); - Job.Builder jobDetails2 = buildJobBuilder("foo"); - Map customSettings2 = new HashMap<>(); - customSettings2.put("key2", "value2"); - jobDetails2.setCustomSettings(customSettings2); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - public void testCopyConstructor() { for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) { Job job = createTestInstance(); @@ -184,20 +95,6 @@ public class JobTests extends AbstractXContentTestCase { assertEquals("[job_type] must not be null", ex.getMessage()); } - public static Job.Builder buildJobBuilder(String id, Date date) { - Job.Builder builder = new Job.Builder(id); - builder.setCreateTime(date); - AnalysisConfig.Builder ac = createAnalysisConfig(); - DataDescription.Builder dc = new DataDescription.Builder(); - builder.setAnalysisConfig(ac); - builder.setDataDescription(dc); - return builder; - } - - public static Job.Builder buildJobBuilder(String id) { - return buildJobBuilder(id, new Date()); - } - public static String randomValidJobId() { CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray()); return generator.ofCodePointsLength(random(), 10, 10); @@ -228,9 +125,6 @@ public class JobTests extends AbstractXContentTestCase { if (randomBoolean()) { builder.setFinishedTime(new Date(randomNonNegativeLong())); } - if (randomBoolean()) { - builder.setLastDataTime(new Date(randomNonNegativeLong())); - } if (randomBoolean()) { builder.setEstablishedModelMemory(randomNonNegativeLong()); } @@ -265,6 +159,9 @@ public class JobTests extends AbstractXContentTestCase { if (randomBoolean()) { builder.setResultsIndexName(randomValidJobId()); } + if (randomBoolean()) { + builder.setDeleting(randomBoolean()); + } return builder; } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/AnomalyRecordTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/AnomalyRecordTests.java index a857cd3d9b1..39bfff3a7e8 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/AnomalyRecordTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/AnomalyRecordTests.java @@ -38,6 +38,9 @@ 
public class AnomalyRecordTests extends AbstractXContentTestCase anomalyRecord.setActual(Collections.singletonList(randomDouble())); anomalyRecord.setTypical(Collections.singletonList(randomDouble())); anomalyRecord.setProbability(randomDouble()); + if (randomBoolean()) { + anomalyRecord.setMultiBucketImpact(randomDouble()); + } anomalyRecord.setRecordScore(randomDouble()); anomalyRecord.setInitialRecordScore(randomDouble()); anomalyRecord.setInterim(randomBoolean()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ActivateWatchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ActivateWatchResponseTests.java new file mode 100644 index 00000000000..136ecbc58c1 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ActivateWatchResponseTests.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.watcher; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.XContentTestUtils; + +import java.io.IOException; +import java.util.function.Predicate; + +/** + * Basic unit tests for {@link ActivateWatchResponse}. + * + * Note that we only sanity check watch status parsing here, as there + * are dedicated tests for it in {@link WatchStatusTests}. 
+ */ +public class ActivateWatchResponseTests extends ESTestCase { + + public void testBasicParsing() throws IOException { + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject() + .startObject("status") + .field("version", 42) + .field("execution_state", ExecutionState.ACKNOWLEDGED) + .startObject("state") + .field("active", false) + .endObject() + .endObject() + .endObject(); + BytesReference bytes = BytesReference.bytes(builder); + + ActivateWatchResponse response = parse(builder.contentType(), bytes); + WatchStatus status = response.getStatus(); + assertNotNull(status); + assertEquals(42, status.version()); + assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState()); + assertFalse(status.state().isActive()); + } + + public void testParsingWithMissingStatus() throws IOException { + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject().endObject(); + BytesReference bytes = BytesReference.bytes(builder); + + expectThrows(IllegalArgumentException.class, () -> parse(builder.contentType(), bytes)); + } + + public void testParsingWithNullStatus() throws IOException { + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject() + .nullField("status") + .endObject(); + BytesReference bytes = BytesReference.bytes(builder); + + expectThrows(XContentParseException.class, () -> parse(builder.contentType(), bytes)); + } + + public void testParsingWithUnknownKeys() throws IOException { + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject() + .startObject("status") + .field("version", 42) + .field("execution_state", ExecutionState.ACKNOWLEDGED) + .startObject("state") + .field("active", true) + .endObject() + .endObject() + .endObject(); + BytesReference bytes = BytesReference.bytes(builder); + + Predicate excludeFilter = field -> field.equals("status.actions"); + BytesReference bytesWithRandomFields = XContentTestUtils.insertRandomFields( + builder.contentType(), bytes, excludeFilter, random()); + + ActivateWatchResponse response = parse(builder.contentType(), bytesWithRandomFields); + WatchStatus status = response.getStatus(); + assertNotNull(status); + assertEquals(42, status.version()); + assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState()); + assertTrue(status.state().isActive()); + } + + private ActivateWatchResponse parse(XContentType contentType, BytesReference bytes) throws IOException { + XContentParser parser = XContentFactory.xContent(contentType) + .createParser(NamedXContentRegistry.EMPTY, null, bytes.streamInput()); + parser.nextToken(); + return ActivateWatchResponse.fromXContent(parser); + } +} diff --git a/client/rest/src/main/java/org/elasticsearch/client/Response.java b/client/rest/src/main/java/org/elasticsearch/client/Response.java index ab61f01f661..4115ef36a5c 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/Response.java +++ b/client/rest/src/main/java/org/elasticsearch/client/Response.java @@ -124,9 +124,9 @@ public class Response { final Matcher matcher = WARNING_HEADER_PATTERN.matcher(warning); if (matcher.matches()) { warnings.add(matcher.group(1)); - continue; + } else { + warnings.add(warning); } - warnings.add(warning); } return warnings; } diff --git 
a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 3aa10762676..65eb92c0e8d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -59,6 +59,7 @@ import java.net.URI; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -70,10 +71,12 @@ import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods; import static org.elasticsearch.client.RestClientTestUtil.getOkStatusCodes; import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode; import static org.elasticsearch.client.SyncResponseListenerTests.assertExceptionStackContainsCallingMethod; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -96,6 +99,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { private Node node; private CloseableHttpAsyncClient httpClient; private HostsTrackingFailureListener failureListener; + private boolean strictDeprecationMode; @Before @SuppressWarnings("unchecked") @@ -147,8 +151,9 @@ public class RestClientSingleHostTests extends RestClientTestCase { defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default"); node = new Node(new HttpHost("localhost", 9200)); failureListener = new HostsTrackingFailureListener(); + strictDeprecationMode = randomBoolean(); restClient = new RestClient(httpClient, 10000, defaultHeaders, - singletonList(node), null, failureListener, NodeSelector.ANY, false); + singletonList(node), null, failureListener, NodeSelector.ANY, strictDeprecationMode); } /** @@ -331,9 +336,54 @@ public class RestClientSingleHostTests extends RestClientTestCase { } assertThat(esResponse.getStatusLine().getStatusCode(), equalTo(statusCode)); assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), Collections.emptySet()); + assertFalse(esResponse.hasWarnings()); } } + public void testDeprecationWarnings() throws IOException { + String chars = randomAsciiAlphanumOfLength(5); + assertDeprecationWarnings(singletonList("poorly formatted " + chars), singletonList("poorly formatted " + chars)); + assertDeprecationWarnings(singletonList(formatWarning(chars)), singletonList(chars)); + assertDeprecationWarnings( + Arrays.asList(formatWarning(chars), "another one", "and another"), + Arrays.asList(chars, "another one", "and another")); + + } + + private void assertDeprecationWarnings(List warningHeaderTexts, List warningBodyTexts) throws IOException { + String method = randomFrom(getHttpMethods()); + Request request = new Request(method, "/200"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + for (String warningHeaderText : warningHeaderTexts) { + options.addHeader("Warning", warningHeaderText); + } + request.setOptions(options); + + Response response; + if (strictDeprecationMode) { + try { + restClient.performRequest(request); + 
fail("expected ResponseException because strict deprecation mode is enabled"); + return; + } catch (ResponseException e) { + assertThat(e.getMessage(), containsString("\nWarnings: " + warningBodyTexts)); + response = e.getResponse(); + } + } else { + response = restClient.performRequest(request); + } + assertTrue(response.hasWarnings()); + assertEquals(warningBodyTexts, response.getWarnings()); + } + + /** + * Emulates Elasticsearch's DeprecationLogger.formatWarning in simple + * cases. We don't have that available because we're testing against 1.7. + */ + private static String formatWarning(String warningBody) { + return "299 Elasticsearch-1.2.2-SNAPSHOT-eeeeeee \"" + warningBody + "\" \"Mon, 01 Jan 2001 00:00:00 GMT\""; + } + private HttpUriRequest performRandomRequest(String method) throws Exception { String uriAsString = "/" + randomStatusCode(getRandom()); Request request = new Request(method, uriAsString); diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 1eed8b41c0e..a44e670542b 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -149,27 +149,39 @@ subprojects { task buildBwcVersion(type: Exec) { dependsOn checkoutBwcBranch, writeBuildMetadata - // send RUNTIME_JAVA_HOME so the build doesn't fails on newer version the branch doesn't know about - environment('RUNTIME_JAVA_HOME', getJavaHome(it, rootProject.ext.minimumRuntimeVersion.getMajorVersion() as int)) workingDir = checkoutDir - // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds - if (["5.6", "6.0", "6.1"].contains(bwcBranch)) { - environment('JAVA_HOME', getJavaHome(it, 8)) - } else if ("6.2".equals(bwcBranch)) { - environment('JAVA_HOME', getJavaHome(it, 9)) - } else if (["6.3", "6.4"].contains(bwcBranch)) { - environment('JAVA_HOME', getJavaHome(it, 10)) - } else if (["6.x"].contains(bwcBranch)) { - environment('JAVA_HOME', getJavaHome(it, 11)) - } else { - environment('JAVA_HOME', project.compilerJavaHome) + doFirst { + // Execution time so that the checkouts are available + List lines = file("$checkoutDir/.ci/java-versions.properties").readLines() + environment( + 'JAVA_HOME', + getJavaHome(it, Integer.parseInt( + lines + .findAll({ it.startsWith("ES_BUILD_JAVA=java") }) + .collect({ it.replace("ES_BUILD_JAVA=java", "").trim() }) + .join("!!") + )) + ) + environment( + 'RUNTIME_JAVA_HOME', + getJavaHome(it, Integer.parseInt( + lines + .findAll({ it.startsWith("ES_RUNTIME_JAVA=java") }) + .collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() }) + .join("!!") + )) + ) } + if (Os.isFamily(Os.FAMILY_WINDOWS)) { executable 'cmd' args '/C', 'call', new File(checkoutDir, 'gradlew').toString() } else { executable new File(checkoutDir, 'gradlew').toString() } + if (gradle.startParameter.isOffline()) { + args "--offline" + } for (String dir : projectDirs) { args ":${dir.replace('/', ':')}:assemble" } @@ -237,4 +249,4 @@ class IndentingOutputStream extends OutputStream { } } } -} \ No newline at end of file +} diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java index c19c12cfe44..d74f106c50b 100644 --- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java @@ -228,7 +228,7 @@ final class JvmOptionsParser 
{ // no range is present, apply the JVM option to the specified major version only upper = lower; } else if (end == null) { - // a range of the form \\d+- is present, apply the JVM option to all major versions larger than the specifed one + // a range of the form \\d+- is present, apply the JVM option to all major versions larger than the specified one upper = Integer.MAX_VALUE; } else { // a range of the form \\d+-\\d+ is present, apply the JVM option to the specified range of major versions diff --git a/docs/README.asciidoc b/docs/README.asciidoc index 89058849a98..176d3324cd1 100644 --- a/docs/README.asciidoc +++ b/docs/README.asciidoc @@ -63,6 +63,8 @@ for its modifiers: * `// TESTRESPONSE[_cat]`: Add substitutions for testing `_cat` responses. Use this after all other substitutions so it doesn't make other substitutions difficult. + * `// TESTRESPONSE[skip:reason]`: Skip the assertions specified by this + response. * `// TESTSETUP`: Marks this snippet as the "setup" for all other snippets in this file. This is a somewhat natural way of structuring documentation. You say "this is the data we use to explain this feature" then you add the @@ -73,6 +75,10 @@ for its modifiers: right in the documentation file. In general, we should prefer `// TESTSETUP` over `// TEST[setup:name]` because it makes it more clear what steps have to be taken before the examples will work. +* `// NOTCONSOLE`: Marks this snippet as neither `// CONSOLE` nor + `// TESTRESPONSE`, excluding it from the list of unconverted snippets. We + should only use this for snippets that *are* JSON but are *not* responses or + requests. In addition to the standard CONSOLE syntax these snippets can contain blocks of yaml surrounded by markers like this: diff --git a/docs/java-rest/high-level/document/delete.asciidoc b/docs/java-rest/high-level/document/delete.asciidoc index 5d263c894c6..1b32fca7042 100644 --- a/docs/java-rest/high-level/document/delete.asciidoc +++ b/docs/java-rest/high-level/document/delete.asciidoc @@ -1,14 +1,20 @@ -[[java-rest-high-document-delete]] +-- +:api: delete +:request: DeleteRequest +:response: DeleteResponse +-- + +[id="{upid}-{api}"] === Delete API -[[java-rest-high-document-delete-request]] +[id="{upid}-{api}-request"] ==== Delete Request -A `DeleteRequest` requires the following arguments: +A +{request}+ requires the following arguments: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Index <2> Type @@ -19,82 +25,47 @@ The following arguments can optionally be provided: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-routing] +include-tagged::{doc-tests-file}[{api}-request-routing] -------------------------------------------------- <1> Routing value ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-timeout] +include-tagged::{doc-tests-file}[{api}-request-timeout] -------------------------------------------------- <1> Timeout to wait for primary shard to become available as a `TimeValue` <2> Timeout to wait for primary shard to become available as a `String` ["source","java",subs="attributes,callouts,macros"] 
-------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-refresh] +include-tagged::{doc-tests-file}[{api}-request-refresh] -------------------------------------------------- <1> Refresh policy as a `WriteRequest.RefreshPolicy` instance <2> Refresh policy as a `String` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-version] +include-tagged::{doc-tests-file}[{api}-request-version] -------------------------------------------------- <1> Version ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-version-type] +include-tagged::{doc-tests-file}[{api}-request-version-type] -------------------------------------------------- <1> Version type -[[java-rest-high-document-delete-sync]] -==== Synchronous Execution +include::../execution.asciidoc[] -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-execute] --------------------------------------------------- - -[[java-rest-high-document-delete-async]] -==== Asynchronous Execution - -The asynchronous execution of a delete request requires both the `DeleteRequest` -instance and an `ActionListener` instance to be passed to the asynchronous -method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-execute-async] --------------------------------------------------- -<1> The `DeleteRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A typical listener for `DeleteResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument -<2> Called in case of failure. 
The raised exception is provided as an argument - -[[java-rest-high-document-delete-response]] +[id="{upid}-{api}-response"] ==== Delete Response -The returned `DeleteResponse` allows to retrieve information about the executed +The returned +{response}+ allows to retrieve information about the executed operation as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> Handle the situation where number of successful shards is less than total shards @@ -105,7 +76,7 @@ It is also possible to check whether the document was found or not: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-notfound] +include-tagged::{doc-tests-file}[{api}-notfound] -------------------------------------------------- <1> Do something if the document to be deleted was not found @@ -114,7 +85,7 @@ be thrown: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-conflict] +include-tagged::{doc-tests-file}[{api}-conflict] -------------------------------------------------- <1> The raised exception indicates that a version conflict error was returned diff --git a/docs/java-rest/high-level/document/exists.asciidoc b/docs/java-rest/high-level/document/exists.asciidoc index d14c9fdd66a..ac6968d1f37 100644 --- a/docs/java-rest/high-level/document/exists.asciidoc +++ b/docs/java-rest/high-level/document/exists.asciidoc @@ -1,12 +1,18 @@ -[[java-rest-high-document-exists]] +-- +:api: exists +:request: GetRequest +:response: boolean +-- + +[id="{upid}-{api}"] === Exists API The exists API returns `true` if a document exists, and `false` otherwise. -[[java-rest-high-document-exists-request]] +[id="{upid}-{api}-request"] ==== Exists Request -It uses `GetRequest` just like the <>. +It uses +{request}+ just like the <>. All of its <> are supported. Since `exists()` only returns `true` or `false`, we recommend turning off fetching `_source` and any stored fields so the request is @@ -14,7 +20,7 @@ slightly lighter: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Index <2> Type @@ -22,39 +28,4 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-request] <4> Disable fetching `_source`. <5> Disable fetching stored fields. 
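As a quick orientation, the lightweight exists check described above boils down to roughly the following. This is a minimal sketch rather than the tested snippet: it assumes an already-initialized `RestHighLevelClient` named `client` and an illustrative `posts`/`doc`/`1` document coordinate.

["source","java"]
--------------------------------------------------
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

// Assumption: `client` is an existing RestHighLevelClient; posts/doc/1 is illustrative
GetRequest getRequest = new GetRequest("posts", "doc", "1");
// Keep the request light: fetch neither _source nor any stored fields
getRequest.fetchSourceContext(new FetchSourceContext(false));
getRequest.storedFields("_none_");
boolean exists = client.exists(getRequest, RequestOptions.DEFAULT);
--------------------------------------------------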
-[[java-rest-high-document-exists-sync]] -==== Synchronous Execution - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute] --------------------------------------------------- - -[[java-rest-high-document-exists-async]] -==== Asynchronous Execution - -The asynchronous execution of exists request requires both the `GetRequest` -instance and an `ActionListener` instance to be passed to the asynchronous -method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute-async] --------------------------------------------------- -<1> The `GetRequest` to execute and the `ActionListener` to use when -the execution completes. - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A typical listener for `GetResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument. -<2> Called in case of failure. The raised exception is provided as an argument. +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/document/get.asciidoc b/docs/java-rest/high-level/document/get.asciidoc index 504b22a8e6d..f3ea9434f71 100644 --- a/docs/java-rest/high-level/document/get.asciidoc +++ b/docs/java-rest/high-level/document/get.asciidoc @@ -1,44 +1,50 @@ -[[java-rest-high-document-get]] +-- +:api: get +:request: GetRequest +:response: GetResponse +-- + +[id="{upid}-{api}"] === Get API -[[java-rest-high-document-get-request]] +[id="{upid}-{api}-request"] ==== Get Request -A `GetRequest` requires the following arguments: +A +{request}+ requires the following arguments: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Index <2> Type <3> Document id -[[java-rest-high-document-get-request-optional-arguments]] +[id="{upid}-{api}-request-optional-arguments"] ==== Optional arguments The following arguments can optionally be provided: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-no-source] +include-tagged::{doc-tests-file}[{api}-request-no-source] -------------------------------------------------- <1> Disable source retrieval, enabled by default ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-source-include] +include-tagged::{doc-tests-file}[{api}-request-source-include] -------------------------------------------------- <1> Configure source inclusion for specific fields ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- 
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-source-exclude] +include-tagged::{doc-tests-file}[{api}-request-source-exclude] -------------------------------------------------- <1> Configure source exclusion for specific fields ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-stored] +include-tagged::{doc-tests-file}[{api}-request-stored] -------------------------------------------------- <1> Configure retrieval for specific stored fields (requires fields to be stored separately in the mappings) @@ -47,92 +53,57 @@ separately in the mappings) ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-routing] +include-tagged::{doc-tests-file}[{api}-request-routing] -------------------------------------------------- <1> Routing value ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-preference] +include-tagged::{doc-tests-file}[{api}-request-preference] -------------------------------------------------- <1> Preference value ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-realtime] +include-tagged::{doc-tests-file}[{api}-request-realtime] -------------------------------------------------- <1> Set realtime flag to `false` (`true` by default) ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-refresh] +include-tagged::{doc-tests-file}[{api}-request-refresh] -------------------------------------------------- <1> Perform a refresh before retrieving the document (`false` by default) ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-version] +include-tagged::{doc-tests-file}[{api}-request-version] -------------------------------------------------- <1> Version ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-version-type] +include-tagged::{doc-tests-file}[{api}-request-version-type] -------------------------------------------------- <1> Version type -[[java-rest-high-document-get-sync]] -==== Synchronous Execution +include::../execution.asciidoc[] -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-execute] --------------------------------------------------- - -[[java-rest-high-document-get-async]] -==== Asynchronous Execution - -The asynchronous execution of a get request requires both the `GetRequest` -instance and an `ActionListener` instance to be passed to the asynchronous -method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-execute-async] --------------------------------------------------- -<1> The `GetRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and 
returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A typical listener for `GetResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument. -<2> Called in case of failure. The raised exception is provided as an argument. - -[[java-rest-high-document-get-response]] +[id="{upid}-{api}-response"] ==== Get Response -The returned `GetResponse` allows to retrieve the requested document along with +The returned +{response}+ allows to retrieve the requested document along with its metadata and eventually stored fields. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> Retrieve the document as a `String` <2> Retrieve the document as a `Map` <3> Retrieve the document as a `byte[]` <4> Handle the scenario where the document was not found. Note that although -the returned response has `404` status code, a valid `GetResponse` is +the returned response has `404` status code, a valid +{response}+ is returned rather than an exception thrown. Such response does not hold any source document and its `isExists` method returns `false`. @@ -142,7 +113,7 @@ which needs to be handled as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-indexnotfound] +include-tagged::{doc-tests-file}[{api}-indexnotfound] -------------------------------------------------- <1> Handle the exception thrown because the index does not exist @@ -151,6 +122,6 @@ document has a different version number, a version conflict is raised: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-conflict] +include-tagged::{doc-tests-file}[{api}-conflict] -------------------------------------------------- <1> The raised exception indicates that a version conflict error was returned diff --git a/docs/java-rest/high-level/document/update.asciidoc b/docs/java-rest/high-level/document/update.asciidoc index 1c780093115..743eb3da0a8 100644 --- a/docs/java-rest/high-level/document/update.asciidoc +++ b/docs/java-rest/high-level/document/update.asciidoc @@ -1,14 +1,20 @@ -[[java-rest-high-document-update]] +-- +:api: update +:request: UpdateRequest +:response: UpdateResponse +-- + +[id="{upid}-{api}"] === Update API -[[java-rest-high-document-update-request]] +[id="{upid}-{api}-request"] ==== Update Request -An `UpdateRequest` requires the following arguments: +An +{request}+ requires the following arguments: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Index <2> Type @@ -22,7 +28,7 @@ The script can be provided as an inline script: 
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-inline-script] +include-tagged::{doc-tests-file}[{api}-request-with-inline-script] -------------------------------------------------- <1> Script parameters provided as a `Map` of objects <2> Create an inline script using the `painless` language and the previous parameters @@ -32,7 +38,7 @@ Or as a stored script: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-stored-script] +include-tagged::{doc-tests-file}[{api}-request-with-stored-script] -------------------------------------------------- <1> Reference to a script stored under the name `increment-field` in the `painless` language <2> Sets the script in the update request @@ -45,27 +51,27 @@ The partial document can be provided in different ways: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-doc-as-string] +include-tagged::{doc-tests-file}[{api}-request-with-doc-as-string] -------------------------------------------------- <1> Partial document source provided as a `String` in JSON format ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-doc-as-map] +include-tagged::{doc-tests-file}[{api}-request-with-doc-as-map] -------------------------------------------------- <1> Partial document source provided as a `Map` which gets automatically converted to JSON format ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-with-doc-as-xcontent] +include-tagged::{doc-tests-file}[{api}-request-with-doc-as-xcontent] -------------------------------------------------- <1> Partial document source provided as an `XContentBuilder` object, the Elasticsearch built-in helpers to generate JSON content ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-shortcut] +include-tagged::{doc-tests-file}[{api}-request-shortcut] -------------------------------------------------- <1> Partial document source provided as `Object` key-pairs, which gets converted to JSON format @@ -76,7 +82,7 @@ will be inserted as a new document using the `upsert` method: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-upsert] +include-tagged::{doc-tests-file}[{api}-request-upsert] -------------------------------------------------- <1> Upsert document source provided as a `String` @@ -89,27 +95,27 @@ The following arguments can optionally be provided: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-routing] +include-tagged::{doc-tests-file}[{api}-request-routing] -------------------------------------------------- <1> Routing value ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- 
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-timeout] +include-tagged::{doc-tests-file}[{api}-request-timeout] -------------------------------------------------- <1> Timeout to wait for primary shard to become available as a `TimeValue` <2> Timeout to wait for primary shard to become available as a `String` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-refresh] +include-tagged::{doc-tests-file}[{api}-request-refresh] -------------------------------------------------- <1> Refresh policy as a `WriteRequest.RefreshPolicy` instance <2> Refresh policy as a `String` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-retry] +include-tagged::{doc-tests-file}[{api}-request-retry] -------------------------------------------------- <1> How many times to retry the update operation if the document to update has been changed by another operation between the get and indexing phases of the @@ -117,103 +123,68 @@ update operation ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-no-source] +include-tagged::{doc-tests-file}[{api}-request-no-source] -------------------------------------------------- <1> Enable source retrieval, disabled by default ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-source-include] +include-tagged::{doc-tests-file}[{api}-request-source-include] -------------------------------------------------- <1> Configure source inclusion for specific fields ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-source-exclude] +include-tagged::{doc-tests-file}[{api}-request-source-exclude] -------------------------------------------------- <1> Configure source exclusion for specific fields ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-version] +include-tagged::{doc-tests-file}[{api}-request-version] -------------------------------------------------- <1> Version ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-detect-noop] +include-tagged::{doc-tests-file}[{api}-request-detect-noop] -------------------------------------------------- <1> Disable the noop detection ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-scripted-upsert] +include-tagged::{doc-tests-file}[{api}-request-scripted-upsert] -------------------------------------------------- <1> Indicate that the script must run regardless of whether the document exists or not, ie the script takes care of creating the document if it does not already exist. 
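To make the scripted-upsert behavior concrete, a request combining a script with `scriptedUpsert` might look like the sketch below. It assumes a `RestHighLevelClient` named `client`, an illustrative `posts`/`doc`/`1` document, and a made-up `counter` field; the tagged test snippets remain the canonical examples.

["source","java"]
--------------------------------------------------
import java.util.Collections;
import java.util.Map;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

// Assumption: `client` is an existing RestHighLevelClient; index, id and field are illustrative
Map<String, Object> params = Collections.singletonMap("count", 1);
Script script = new Script(ScriptType.INLINE, "painless",
        "if (ctx._source.counter == null) { ctx._source.counter = params.count } "
        + "else { ctx._source.counter += params.count }", params);
UpdateRequest request = new UpdateRequest("posts", "doc", "1")
        .script(script)
        .scriptedUpsert(true)            // run the script even if the document does not exist yet
        .upsert(Collections.emptyMap()); // empty base document used when the script creates it
UpdateResponse response = client.update(request, RequestOptions.DEFAULT);
--------------------------------------------------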
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-doc-upsert] +include-tagged::{doc-tests-file}[{api}-request-doc-upsert] -------------------------------------------------- <1> Indicate that the partial document must be used as the upsert document if it does not exist yet. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-active-shards] +include-tagged::{doc-tests-file}[{api}-request-active-shards] -------------------------------------------------- <1> Sets the number of shard copies that must be active before proceeding with the update operation. <2> Number of shard copies provided as a `ActiveShardCount`: can be `ActiveShardCount.ALL`, `ActiveShardCount.ONE` or `ActiveShardCount.DEFAULT` (default) -[[java-rest-high-document-update-sync]] -==== Synchronous Execution +include::../execution.asciidoc[] -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-execute] --------------------------------------------------- - -[[java-rest-high-document-update-async]] -==== Asynchronous Execution - -The asynchronous execution of an update request requires both the `UpdateRequest` -instance and an `ActionListener` instance to be passed to the asynchronous -method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-execute-async] --------------------------------------------------- -<1> The `UpdateRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A typical listener for `UpdateResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument. -<2> Called in case of failure. The raised exception is provided as an argument. 
- -[[java-rest-high-document-update-response]] +[id="{upid}-{api}-response"] ==== Update Response -The returned `UpdateResponse` allows to retrieve information about the executed - operation as follows: +The returned +{response}+ allows to retrieve information about the executed +operation as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> Handle the case where the document was created for the first time (upsert) <2> Handle the case where the document was updated @@ -227,7 +198,7 @@ source of the updated document: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-getresult] +include-tagged::{doc-tests-file}[{api}-getresult] -------------------------------------------------- <1> Retrieve the updated document as a `GetResult` <2> Retrieve the source of the updated document as a `String` @@ -240,7 +211,7 @@ It is also possible to check for shard failures: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-failure] +include-tagged::{doc-tests-file}[{api}-failure] -------------------------------------------------- <1> Handle the situation where number of successful shards is less than total shards @@ -252,7 +223,7 @@ which needs to be handled as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-docnotfound] +include-tagged::{doc-tests-file}[{api}-docnotfound] -------------------------------------------------- <1> Handle the exception thrown because the document not exist @@ -261,6 +232,6 @@ be thrown: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-conflict] +include-tagged::{doc-tests-file}[{api}-conflict] -------------------------------------------------- <1> The raised exception indicates that a version conflict error was returned. diff --git a/docs/java-rest/high-level/execution.asciidoc b/docs/java-rest/high-level/execution.asciidoc index fc4f4c0ec60..4dfb11e196d 100644 --- a/docs/java-rest/high-level/execution.asciidoc +++ b/docs/java-rest/high-level/execution.asciidoc @@ -38,7 +38,7 @@ completed the `ActionListener` is called back using the `onResponse` method if the execution successfully completed or using the `onFailure` method if it failed. -A typical listener for +{response}+ looks like: +A typical listener for +{api}+ looks like: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/docs/java-rest/high-level/ml/close-job.asciidoc b/docs/java-rest/high-level/ml/close-job.asciidoc index edadb9f40a2..8a38b498629 100644 --- a/docs/java-rest/high-level/ml/close-job.asciidoc +++ b/docs/java-rest/high-level/ml/close-job.asciidoc @@ -1,18 +1,23 @@ -[[java-rest-high-x-pack-ml-close-job]] +-- +:api: close-job +:request: CloseJobRequest +:response: CloseJobResponse +-- +[id="{upid}-{api}"] === Close Job API The Close Job API provides the ability to close {ml} jobs in the cluster. 
-It accepts a `CloseJobRequest` object and responds -with a `CloseJobResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-close-job-request]] +[id="{upid}-{api}-request"] ==== Close Job Request -A `CloseJobRequest` object gets created with an existing non-null `jobId`. +A +{request}+ object gets created with an existing non-null `jobId`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-close-job-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing existing job IDs <2> Optionally used to close a failed job, or to forcefully close a job @@ -22,38 +27,14 @@ which has not responded to its initial close request. <4> Optionally setting the `timeout` value for how long the execution should wait for the job to be closed. -[[java-rest-high-x-pack-ml-close-job-execution]] -==== Execution - -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. +[id="{upid}-{api}-response"] +==== Close Job Response ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-close-job-execute] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- -<1> `isClosed()` from the `CloseJobResponse` indicates if the job was successfully +<1> `isClosed()` from the +{response}+ indicates if the job was successfully closed or not. -[[java-rest-high-x-pack-ml-close-job-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-close-job-execute-async] --------------------------------------------------- -<1> The `CloseJobRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `CloseJobResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-close-job-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/delete-calendar.asciidoc b/docs/java-rest/high-level/ml/delete-calendar.asciidoc index 8f25576a96f..e7d5318a465 100644 --- a/docs/java-rest/high-level/ml/delete-calendar.asciidoc +++ b/docs/java-rest/high-level/ml/delete-calendar.asciidoc @@ -1,59 +1,33 @@ -[[java-rest-high-x-pack-ml-delete-calendar]] +-- +:api: delete-calendar +:request: DeleteCalendarRequest +:response: AcknowledgedResponse +-- +[id="{upid}-{api}"] === Delete Calendar API Delete a {ml} calendar. -The API accepts a `DeleteCalendarRequest` and responds -with a `AcknowledgedResponse` object. +The API accepts a +{request}+ and responds +with an +{response}+ object.
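End to end, a deletion amounts to roughly the sketch below, assuming an already-configured `RestHighLevelClient` named `client`, an illustrative calendar id `holidays`, and the 6.x package layout for `AcknowledgedResponse` (its location has moved across client versions); the tagged snippets below remain the tested examples.

["source","java"]
--------------------------------------------------
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.ml.DeleteCalendarRequest;

// Assumption: `client` is an existing RestHighLevelClient; `holidays` is illustrative
DeleteCalendarRequest request = new DeleteCalendarRequest("holidays");
AcknowledgedResponse response = client.machineLearning().deleteCalendar(request, RequestOptions.DEFAULT);
boolean acknowledged = response.isAcknowledged(); // true if the calendar was removed
--------------------------------------------------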
-[[java-rest-high-x-pack-ml-delete-calendar-request]] +[id="{upid}-{api}-request"] ==== Delete Calendar Request A `DeleteCalendar` object requires a non-null `calendarId`. ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-request] +include-tagged::{doc-tests-file}[{api}-request] --------------------------------------------------- <1> Constructing a new request referencing an existing Calendar -[[java-rest-high-x-pack-ml-delete-calendar-response]] +[id="{upid}-{api}-response"] ==== Delete Calendar Response -The returned `AcknowledgedResponse` object indicates the acknowledgement of the request: +The returned +{response}+ object indicates the acknowledgement of the request: ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-response] +include-tagged::{doc-tests-file}[{api}-response] --------------------------------------------------- <1> `isAcknowledged` was the deletion request acknowledged or not -[[java-rest-high-x-pack-ml-delete-calendar-execution]] -==== Execution -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-delete-calendar-async]] -==== Delete Calendar Asynchronously - -This request can also be made asynchronously. -["source","java",subs="attributes,callouts,macros"] ---------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-execute-async] ---------------------------------------------------- -<1> The `DeleteCalendarRequest` to execute and the `ActionListener` to alert on completion or error. - -The deletion request returns immediately. Once the request is completed, the `ActionListener` is -called back using the `onResponse` or `onFailure`. The latter indicates some failure occurred when -making the request. 
- -A typical listener for a `DeleteCalendarRequest` could be defined as follows: - -["source","java",subs="attributes,callouts,macros"] ---------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-calendar-listener] ---------------------------------------------------- -<1> The action to be taken when it is completed -<2> What to do when a failure occurs +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/delete-datafeed.asciidoc b/docs/java-rest/high-level/ml/delete-datafeed.asciidoc index 68741651b33..02bfafd7951 100644 --- a/docs/java-rest/high-level/ml/delete-datafeed.asciidoc +++ b/docs/java-rest/high-level/ml/delete-datafeed.asciidoc @@ -1,49 +1,32 @@ -[[java-rest-high-x-pack-ml-delete-datafeed]] +-- +:api: delete-datafeed +:request: DeleteDatafeedRequest +:response: AcknowledgedResponse +-- +[id="{upid}-delete-datafeed"] === Delete Datafeed API -[[java-rest-high-x-pack-machine-learning-delete-datafeed-request]] +[id="{upid}-{api}-request"] ==== Delete Datafeed Request -A `DeleteDatafeedRequest` object requires a non-null `datafeedId` and can optionally set `force`. -Can be executed as follows: +A +{request}+ object requires a non-null `datafeedId` and can optionally set `force`. ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request] +include-tagged::{doc-tests-file}[{api}-request] --------------------------------------------------- <1> Use to forcefully delete a started datafeed; this method is quicker than stopping and deleting the datafeed. Defaults to `false`. -[[java-rest-high-x-pack-machine-learning-delete-datafeed-response]] +include::../execution.asciidoc[] + +[id="{upid}-{api}-response"] ==== Delete Datafeed Response -The returned `AcknowledgedResponse` object indicates the acknowledgement of the request: +The returned +{response}+ object indicates the acknowledgement of the request: ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-response] +include-tagged::{doc-tests-file}[{api}-response] --------------------------------------------------- <1> `isAcknowledged` was the deletion request acknowledged or not - -[[java-rest-high-x-pack-machine-learning-delete-datafeed-async]] -==== Delete Datafeed Asynchronously - -This request can also be made asynchronously. -["source","java",subs="attributes,callouts,macros"] ---------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request-async] ---------------------------------------------------- -<1> The `DeleteDatafeedRequest` to execute and the `ActionListener` to alert on completion or error. - -The deletion request returns immediately. Once the request is completed, the `ActionListener` is -called back using the `onResponse` or `onFailure`. The latter indicates some failure occurred when -making the request. 
- -A typical listener for a `DeleteDatafeedRequest` could be defined as follows: - -["source","java",subs="attributes,callouts,macros"] ---------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request-listener] ---------------------------------------------------- -<1> The action to be taken when it is completed -<2> What to do when a failure occurs diff --git a/docs/java-rest/high-level/ml/delete-forecast.asciidoc b/docs/java-rest/high-level/ml/delete-forecast.asciidoc index 09aa5c734ff..961254b4815 100644 --- a/docs/java-rest/high-level/ml/delete-forecast.asciidoc +++ b/docs/java-rest/high-level/ml/delete-forecast.asciidoc @@ -1,20 +1,25 @@ -[[java-rest-high-x-pack-ml-delete-forecast]] +-- +:api: delete-forecast +:request: DeleteForecastRequest +:response: AcknowledgedResponse +-- +[id="{upid}-{api}"] === Delete Forecast API The Delete Forecast API provides the ability to delete a {ml} job's forecast in the cluster. -It accepts a `DeleteForecastRequest` object and responds -with an `AcknowledgedResponse` object. +It accepts a +{request}+ object and responds +with an +{response}+ object. -[[java-rest-high-x-pack-ml-delete-forecast-request]] +[id="{upid}-{api}-request"] ==== Delete Forecast Request -A `DeleteForecastRequest` object gets created with an existing non-null `jobId`. +A +{request}+ object gets created with an existing non-null `jobId`. All other fields are optional for the request. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing an existing `jobId` @@ -24,55 +29,23 @@ The following arguments are optional. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-request-options] +include-tagged::{doc-tests-file}[{api}-request-options] -------------------------------------------------- <1> Sets the specific forecastIds to delete, can be set to `_all` to indicate ALL forecasts for the given `jobId` <2> Set the timeout for the request to respond, default is 30 seconds <3> Set the `allow_no_forecasts` option. When `true` no error will be returned if an `_all` -request finds no forecasts. It defaults to `true` +request finds no forecasts. It defaults to `true` -[[java-rest-high-x-pack-ml-delete-forecast-execution]] -==== Execution - -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. 
- -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-delete-forecast-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-execute-async] --------------------------------------------------- -<1> The `DeleteForecastRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `AcknowledgedResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-delete-forecast-response]] +[id="{upid}-{api}-response"] ==== Delete Forecast Response -An `AcknowledgedResponse` contains an acknowledgement of the forecast(s) deletion +An +{response}+ contains an acknowledgement of the forecast(s) deletion ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-delete-forecast-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> `isAcknowledged()` indicates if the forecast was successfully deleted or not. + +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/delete-job.asciidoc b/docs/java-rest/high-level/ml/delete-job.asciidoc index 43f1e2fb02b..a8c6b276dd4 100644 --- a/docs/java-rest/high-level/ml/delete-job.asciidoc +++ b/docs/java-rest/high-level/ml/delete-job.asciidoc @@ -1,49 +1,53 @@ -[[java-rest-high-x-pack-ml-delete-job]] +-- +:api: delete-job +:request: DeleteJobRequest +:response: AcknowledgedResponse +-- +[id="{upid}-{api}"] === Delete Job API -[[java-rest-high-x-pack-machine-learning-delete-job-request]] +[id="{upid}-{api}-request"] ==== Delete Job Request -A `DeleteJobRequest` object requires a non-null `jobId` and can optionally set `force`. -Can be executed as follows: +A +{request}+ object requires a non-null `jobId` and can optionally set `force`. ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request] +include-tagged::{doc-tests-file}[{api}-request] +--------------------------------------------------- +<1> Constructing a new request referencing an existing `jobId` + +==== Optional Arguments + +The following arguments are optional: + +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request-force] --------------------------------------------------- <1> Use to forcefully delete an opened job; this method is quicker than closing and deleting the job. 
-Defaults to `false` +Defaults to `false`. -[[java-rest-high-x-pack-machine-learning-delete-job-response]] +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request-wait-for-completion] +--------------------------------------------------- +<1> Use to set whether the request should wait until the operation has completed before returning. +Defaults to `true`. + + +[id="{upid}-{api}-response"] ==== Delete Job Response -The returned `AcknowledgedResponse` object indicates the acknowledgement of the request: -["source","java",subs="attributes,callouts,macros"] ---------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-response] ---------------------------------------------------- -<1> `isAcknowledged` was the deletion request acknowledged or not - -[[java-rest-high-x-pack-machine-learning-delete-job-async]] -==== Delete Job Asynchronously - -This request can also be made asynchronously. -["source","java",subs="attributes,callouts,macros"] ---------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-async] ---------------------------------------------------- -<1> The `DeleteJobRequest` to execute and the `ActionListener` to alert on completion or error. - -The deletion request returns immediately. Once the request is completed, the `ActionListener` is -called back using the `onResponse` or `onFailure`. The latter indicates some failure occurred when -making the request. - -A typical listener for a `DeleteJobRequest` could be defined as follows: +The returned +{response}+ object indicates the acknowledgement of the job deletion or +the id of the deletion task, depending on whether the request was set to wait for completion: ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-listener] +include-tagged::{doc-tests-file}[{api}-response] --------------------------------------------------- -<1> The action to be taken when it is completed -<2> What to do when a failure occurs +<1> Whether the job deletion was acknowledged or not; will be `null` when the request was set not to wait for completion +<2> The id of the job deletion task; will be `null` when the request was set to wait for completion + +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/flush-job.asciidoc b/docs/java-rest/high-level/ml/flush-job.asciidoc index 1f815bba0d5..e721d48d4d1 100644 --- a/docs/java-rest/high-level/ml/flush-job.asciidoc +++ b/docs/java-rest/high-level/ml/flush-job.asciidoc @@ -1,20 +1,25 @@ -[[java-rest-high-x-pack-ml-flush-job]] +-- +:api: flush-job +:request: FlushJobRequest +:response: FlushJobResponse +-- +[id="{upid}-{api}"] === Flush Job API The Flush Job API provides the ability to flush a {ml} job's datafeed in the cluster. -It accepts a `FlushJobRequest` object and responds -with a `FlushJobResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-flush-job-request]] +[id="{upid}-{api}-request"] ==== Flush Job Request -A `FlushJobRequest` object gets created with an existing non-null `jobId`. +A +{request}+ object gets created with an existing non-null `jobId`. All other fields are optional for the request.
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing an existing `jobId` @@ -24,7 +29,7 @@ The following arguments are optional. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request-options] +include-tagged::{doc-tests-file}[{api}-request-options] -------------------------------------------------- <1> Set request to calculate the interim results <2> Set the advanced time to flush to the particular time value @@ -34,50 +39,18 @@ to calculate the interim results (requires `calc_interim` to be `true`) to calculate interim results (requires `calc_interim` to be `true`) <5> Set the skip time to skip a particular time value -[[java-rest-high-x-pack-ml-flush-job-execution]] -==== Execution - -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-flush-job-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute-async] --------------------------------------------------- -<1> The `FlushJobRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `FlushJobResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-flush-job-response]] +[id="{upid}-{api}-response"] ==== Flush Job Response -A `FlushJobResponse` contains an acknowledgement and an optional end date for the +A +{response}+ contains an acknowledgement and an optional end date for the last finalized bucket ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> `isFlushed()` indicates if the job was successfully flushed or not. <2> `getLastFinalizedBucketEnd()` provides the timestamp -(in milliseconds-since-the-epoch) of the end of the last bucket that was processed. \ No newline at end of file +(in milliseconds-since-the-epoch) of the end of the last bucket that was processed. 
+ +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/forecast-job.asciidoc b/docs/java-rest/high-level/ml/forecast-job.asciidoc index 88bd5fdb532..48d899d6814 100644 --- a/docs/java-rest/high-level/ml/forecast-job.asciidoc +++ b/docs/java-rest/high-level/ml/forecast-job.asciidoc @@ -1,20 +1,25 @@ -[[java-rest-high-x-pack-ml-forecast-job]] +-- +:api: forecast-job +:request: ForecastJobRequest +:response: ForecastJobResponse +-- +[id="{upid}-{api}"] === Forecast Job API The Forecast Job API provides the ability to forecast a {ml} job's behavior based on historical data. -It accepts a `ForecastJobRequest` object and responds -with a `ForecastJobResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-forecast-job-request]] +[id="{upid}-{api}-request"] ==== Forecast Job Request -A `ForecastJobRequest` object gets created with an existing non-null `jobId`. +A +{request}+ object gets created with an existing non-null `jobId`. All other fields are optional for the request. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing an existing `jobId` @@ -24,53 +29,21 @@ The following arguments are optional. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-request-options] +include-tagged::{doc-tests-file}[{api}-request-options] -------------------------------------------------- <1> Set when the forecast for the job should expire <2> Set how far into the future should the forecast predict -[[java-rest-high-x-pack-ml-forecast-job-execution]] -==== Execution - -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-forecast-job-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-execute-async] --------------------------------------------------- -<1> The `ForecastJobRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. 
A typical `ActionListener` for `ForecastJobResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-forecast-job-response]] +[id="{upid}-{api}-response"] ==== Forecast Job Response -A `ForecastJobResponse` contains an acknowledgement and the forecast ID +A +{response}+ contains an acknowledgement and the forecast ID ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> `isAcknowledged()` indicates if the forecast was successful -<2> `getForecastId()` provides the ID of the forecast that was created \ No newline at end of file +<2> `getForecastId()` provides the ID of the forecast that was created + +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/get-buckets.asciidoc b/docs/java-rest/high-level/ml/get-buckets.asciidoc index f77b368a495..f150695befe 100644 --- a/docs/java-rest/high-level/ml/get-buckets.asciidoc +++ b/docs/java-rest/high-level/ml/get-buckets.asciidoc @@ -1,18 +1,23 @@ -[[java-rest-high-x-pack-ml-get-buckets]] +-- +:api: get-buckets +:request: GetBucketsRequest +:response: GetBucketsResponse +-- +[id="{upid}-{api}"] === Get Buckets API The Get Buckets API retrieves one or more bucket results. -It accepts a `GetBucketsRequest` object and responds -with a `GetBucketsResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-get-buckets-request]] +[id="{upid}-{api}-request"] ==== Get Buckets Request -A `GetBucketsRequest` object gets created with an existing non-null `jobId`. +A +{request}+ object gets created with an existing non-null `jobId`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing an existing `jobId` @@ -21,105 +26,69 @@ The following arguments are optional: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-timestamp] +include-tagged::{doc-tests-file}[{api}-timestamp] -------------------------------------------------- <1> The timestamp of the bucket to get. Otherwise it will return all buckets. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-anomaly-score] +include-tagged::{doc-tests-file}[{api}-anomaly-score] -------------------------------------------------- <1> Buckets with anomaly scores greater or equal than this value will be returned. 
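For orientation, a few of these options combined might look like the following sketch, assuming a `RestHighLevelClient` named `client`, an illustrative job id `my-job`, and the 6.x location of `PageParams`; the remaining optional arguments continue right after this example, and the tagged snippets remain the tested forms.

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.job.util.PageParams;

// Assumption: `client` is an existing RestHighLevelClient; `my-job` is illustrative
GetBucketsRequest request = new GetBucketsRequest("my-job");
request.setAnomalyScore(75.0);                // only buckets scoring at least 75
request.setSort("anomaly_score");             // sort on the anomaly score...
request.setDescending(true);                  // ...highest first
request.setPageParams(new PageParams(0, 50)); // first 50 matching buckets
GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT);
long count = response.count();
--------------------------------------------------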
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-desc] +include-tagged::{doc-tests-file}[{api}-desc] -------------------------------------------------- <1> If `true`, the buckets are sorted in descending order. Defaults to `false`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-end] +include-tagged::{doc-tests-file}[{api}-end] -------------------------------------------------- <1> Buckets with timestamps earlier than this time will be returned. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-exclude-interim] +include-tagged::{doc-tests-file}[{api}-exclude-interim] -------------------------------------------------- <1> If `true`, interim results will be excluded. Defaults to `false`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-expand] +include-tagged::{doc-tests-file}[{api}-expand] -------------------------------------------------- <1> If `true`, buckets will include their anomaly records. Defaults to `false`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-page] +include-tagged::{doc-tests-file}[{api}-page] -------------------------------------------------- <1> The page parameters `from` and `size`. `from` specifies the number of buckets to skip. `size` specifies the maximum number of buckets to get. Defaults to `0` and `100` respectively. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-sort] +include-tagged::{doc-tests-file}[{api}-sort] -------------------------------------------------- <1> The field to sort buckets on. Defaults to `timestamp`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-start] +include-tagged::{doc-tests-file}[{api}-start] -------------------------------------------------- <1> Buckets with timestamps on or after this time will be returned. -[[java-rest-high-x-pack-ml-get-buckets-execution]] -==== Execution +include::../execution.asciidoc[] -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. 
- -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-execute] --------------------------------------------------- - - -[[java-rest-high-x-pack-ml-get-buckets-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-execute-async] --------------------------------------------------- -<1> The `GetBucketsRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back with the `onResponse` method -if the execution is successful or the `onFailure` method if the execution -failed. - -A typical listener for `GetBucketsResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-get-buckets-response]] +[id="{upid}-{api}-response"] ==== Get Buckets Response -The returned `GetBucketsResponse` contains the requested buckets: +The returned +{response}+ contains the requested buckets: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The count of buckets that were matched <2> The buckets retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/get-calendars.asciidoc b/docs/java-rest/high-level/ml/get-calendars.asciidoc index d9247758a46..7c78612e064 100644 --- a/docs/java-rest/high-level/ml/get-calendars.asciidoc +++ b/docs/java-rest/high-level/ml/get-calendars.asciidoc @@ -1,83 +1,53 @@ -[[java-rest-high-x-pack-ml-get-calendars]] +-- +:api: get-calendars +:request: GetCalendarsRequest +:response: GetCalendarsResponse +-- +[id="{upid}-{api}"] === Get Calendars API Retrieves one or more calendar objects. -It accepts a `GetCalendarsRequest` and responds -with a `GetCalendarsResponse` object. +It accepts a +{request}+ and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-get-calendars-request]] +[id="{upid}-{api}-request"] ==== Get Calendars Request -By default a `GetCalendarsRequest` with no calendar Id set will return all +By default a +{request}+ with no calendar Id set will return all calendars. Using the literal `_all` also returns all calendars. 
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request for all calendars - ==== Optional Arguments The following arguments are optional: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-id] +include-tagged::{doc-tests-file}[{api}-id] -------------------------------------------------- <1> Construct a request for the single calendar `holidays` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-page] +include-tagged::{doc-tests-file}[{api}-page] -------------------------------------------------- <1> The page parameters `from` and `size`. `from` specifies the number of calendars to skip. `size` specifies the maximum number of calendars to get. Defaults to `0` and `100` respectively. -[[java-rest-high-x-pack-ml-get-calendars-execution]] -==== Execution -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. +include::../execution.asciidoc[] -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-execution] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-get-calendars-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-execute-async] --------------------------------------------------- -<1> The `GetCalendarsRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back with the `onResponse` method -if the execution is successful or the `onFailure` method if the execution -failed. 
- -A typical listener for `GetCalendarsResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-get-calendars-response]] +[id="{upid}-{api}-response"] ==== Get Calendars Response -The returned `GetCalendarsResponse` contains the requested calendars: +The returned +{response}+ contains the requested calendars: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The count of calendars that were matched <2> The calendars retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/get-categories.asciidoc b/docs/java-rest/high-level/ml/get-categories.asciidoc index 2a5988a5cc6..0aa0c7696cc 100644 --- a/docs/java-rest/high-level/ml/get-categories.asciidoc +++ b/docs/java-rest/high-level/ml/get-categories.asciidoc @@ -1,18 +1,23 @@ -[[java-rest-high-x-pack-ml-get-categories]] +-- +:api: get-categories +:request: GetCategoriesRequest +:response: GetCategoriesResponse +-- +[id="{upid}-{api}"] === Get Categories API The Get Categories API retrieves one or more category results. -It accepts a `GetCategoriesRequest` object and responds -with a `GetCategoriesResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-get-categories-request]] +[id="{upid}-{api}-request"] ==== Get Categories Request -A `GetCategoriesRequest` object gets created with an existing non-null `jobId`. +A +{request}+ object gets created with an existing non-null `jobId`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing an existing `jobId` @@ -21,63 +26,27 @@ The following arguments are optional: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-category-id] +include-tagged::{doc-tests-file}[{api}-category-id] -------------------------------------------------- <1> The ID of the category to get. If not set, all categories are returned. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-page] +include-tagged::{doc-tests-file}[{api}-page] -------------------------------------------------- <1> The page parameters `from` and `size`. `from` specifies the number of categories to skip. `size` specifies the maximum number of categories to get. Defaults to `0` and `100` respectively.
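+
+Putting the request and its paging option together, a minimal sketch (the
+setter names are assumed from the callouts above; not a tested snippet):
+
+["source","java"]
+--------------------------------------------------
+// Hypothetical job ID; skip 0 categories and return at most 20.
+GetCategoriesRequest sketchRequest = new GetCategoriesRequest("my-job-id");
+sketchRequest.setPageParams(new PageParams(0, 20));
+--------------------------------------------------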
-[[java-rest-high-x-pack-ml-get-categories-execution]] -==== Execution +include::../execution.asciidoc[] -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-execute] --------------------------------------------------- - - -[[java-rest-high-x-pack-ml-get-categories-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-execute-async] --------------------------------------------------- -<1> The `GetCategoriesRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back with the `onResponse` method -if the execution is successful or the `onFailure` method if the execution -failed. - -A typical listener for `GetCategoriesResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-get-categories-response]] +[id="{upid}-{api}-response"] ==== Get Categories Response -The returned `GetCategoriesResponse` contains the requested categories: +The returned +{response}+ contains the requested categories: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The count of categories that were matched <2> The categories retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/get-datafeed-stats.asciidoc b/docs/java-rest/high-level/ml/get-datafeed-stats.asciidoc new file mode 100644 index 00000000000..47486669dfc --- /dev/null +++ b/docs/java-rest/high-level/ml/get-datafeed-stats.asciidoc @@ -0,0 +1,40 @@ +-- +:api: get-datafeed-stats +:request: GetDatafeedStatsRequest +:response: GetDatafeedStatsResponse +-- +[id="{upid}-{api}"] +=== Get Datafeed Stats API + +The Get Datafeed Stats API provides the ability to get any number of +{ml} datafeeds' statistics in the cluster. +It accepts a +{request}+ object and responds +with a +{response}+ object. + +[id="{upid}-{api}-request"] +==== Get Datafeed Stats Request + +A +{request}+ object can have any number of `datafeedId` +entries. However, they all must be non-null. An empty list is the same as +requesting statistics for all datafeeds.
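+
+For example, a sketch of such a request (the varargs constructor is implied by
+the callout below; the datafeed IDs are hypothetical):
+
+["source","java"]
+--------------------------------------------------
+// One concrete ID plus a wildcard expression.
+GetDatafeedStatsRequest sketchRequest =
+    new GetDatafeedStatsRequest("datafeed-1", "datafeed-high-*");
+--------------------------------------------------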
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request] +-------------------------------------------------- +<1> Constructing a new request referencing existing `datafeedIds`, can contain wildcards +<2> Whether to ignore if a wildcard expression matches no datafeeds. + (This includes `_all` string or when no datafeeds have been specified) + +include::../execution.asciidoc[] + +[id="{upid}-{api}-response"] +==== Get Datafeed Stats Response +The returned +{response}+ contains the requested datafeed statistics: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- +<1> `count()` indicates the number of datafeeds statistics found +<2> `datafeedStats()` is the collection of {ml} `DatafeedStats` objects found \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/get-datafeed.asciidoc b/docs/java-rest/high-level/ml/get-datafeed.asciidoc index 8e5f0664c61..b624a84c86c 100644 --- a/docs/java-rest/high-level/ml/get-datafeed.asciidoc +++ b/docs/java-rest/high-level/ml/get-datafeed.asciidoc @@ -1,56 +1,37 @@ -[[java-rest-high-x-pack-ml-get-datafeed]] +-- +:api: get-datafeed +:request: GetDatafeedRequest +:response: GetDatafeedResponse +-- +[id="{upid}-{api}"] === Get Datafeed API The Get Datafeed API provides the ability to get {ml} datafeeds in the cluster. -It accepts a `GetDatafeedRequest` object and responds -with a `GetDatafeedResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-get-datafeed-request]] +[id="{upid}-{api}-request"] ==== Get Datafeed Request -A `GetDatafeedRequest` object gets can have any number of `datafeedId` entries. +A +{request}+ object gets can have any number of `datafeedId` entries. However, they all must be non-null. An empty list is the same as requesting for all datafeeds. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing existing `datafeedIds`, can contain wildcards <2> Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) -[[java-rest-high-x-pack-ml-get-datafeed-execution]] -==== Execution - -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. 
+[id="{upid}-{api}-response"] +==== Get Datafeed Response ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-execute] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The count of retrieved datafeeds <2> The retrieved datafeeds -[[java-rest-high-x-pack-ml-get-datafeed-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-execute-async] --------------------------------------------------- -<1> The `GetDatafeedRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `GetDatafeedResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-datafeed-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/get-influencers.asciidoc b/docs/java-rest/high-level/ml/get-influencers.asciidoc index 8b1ba061bcb..6167a62e5a3 100644 --- a/docs/java-rest/high-level/ml/get-influencers.asciidoc +++ b/docs/java-rest/high-level/ml/get-influencers.asciidoc @@ -1,18 +1,23 @@ -[[java-rest-high-x-pack-ml-get-influencers]] +-- +:api: get-influencers +:request: GetInfluencersRequest +:response: GetInfluencersResponse +-- +[id="{upid}-{api}"] === Get Influencers API The Get Influencers API retrieves one or more influencer results. -It accepts a `GetInfluencersRequest` object and responds -with a `GetInfluencersResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-get-influencers-request]] +[id="{upid}-{api}-request"] ==== Get Influencers Request -A `GetInfluencersRequest` object gets created with an existing non-null `jobId`. +A +{request}+ object gets created with an existing non-null `jobId`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing an existing `jobId` @@ -21,92 +26,57 @@ The following arguments are optional: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-desc] +include-tagged::{doc-tests-file}[{api}-desc] -------------------------------------------------- <1> If `true`, the influencers are sorted in descending order. Defaults to `false`. 
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-end] +include-tagged::{doc-tests-file}[{api}-end] -------------------------------------------------- <1> Influencers with timestamps earlier than this time will be returned. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-exclude-interim] +include-tagged::{doc-tests-file}[{api}-exclude-interim] -------------------------------------------------- <1> If `true`, interim results will be excluded. Defaults to `false`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-influencer-score] +include-tagged::{doc-tests-file}[{api}-influencer-score] -------------------------------------------------- <1> Influencers with influencer_score greater or equal than this value will be returned. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-page] +include-tagged::{doc-tests-file}[{api}-page] -------------------------------------------------- <1> The page parameters `from` and `size`. `from` specifies the number of influencers to skip. `size` specifies the maximum number of influencers to get. Defaults to `0` and `100` respectively. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-sort] +include-tagged::{doc-tests-file}[{api}-sort] -------------------------------------------------- <1> The field to sort influencers on. Defaults to `influencer_score`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-start] +include-tagged::{doc-tests-file}[{api}-start] -------------------------------------------------- <1> Influencers with timestamps on or after this time will be returned. -[[java-rest-high-x-pack-ml-get-influencers-execution]] -==== Execution +include::../execution.asciidoc[] -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-get-influencers-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-execute-async] --------------------------------------------------- -<1> The `GetInfluencersRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. 
Once it is -completed the `ActionListener` is called back with the `onResponse` method -if the execution is successful or the `onFailure` method if the execution -failed. - -A typical listener for `GetInfluencersResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-get-influencers-response]] +[id="{upid}-{api}-response"] ==== Get Influencers Response -The returned `GetInfluencersResponse` contains the requested influencers: +The returned +{response}+ contains the requested influencers: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influencers-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The count of influencers that were matched <2> The influencers retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/get-job-stats.asciidoc b/docs/java-rest/high-level/ml/get-job-stats.asciidoc index 90f7794ae76..afea6f1a104 100644 --- a/docs/java-rest/high-level/ml/get-job-stats.asciidoc +++ b/docs/java-rest/high-level/ml/get-job-stats.asciidoc @@ -1,12 +1,17 @@ -[[java-rest-high-x-pack-ml-get-job-stats]] +-- +:api: get-job-stats +:request: GetJobStatsRequest +:response: GetJobStatsResponse +-- +[id="{upid}-{api}"] === Get Job Stats API The Get Job Stats API provides the ability to get any number of {ml} jobs' statistics in the cluster. -It accepts a `GetJobStatsRequest` object and responds -with a `GetJobStatsResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-get-job-stats-request]] +[id="{upid}-{api}-request"] ==== Get Job Stats Request A +{request}+ object can have any number of `jobId` @@ -15,53 +20,21 @@ requesting statistics for all jobs. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing existing `jobIds`, can contain wildcards <2> Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) -[[java-rest-high-x-pack-ml-get-job-stats-execution]] -==== Execution +include::../execution.asciidoc[] -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
- -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-get-job-stats-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute-async] --------------------------------------------------- -<1> The `GetJobsStatsRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `GetJobsStatsResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-get-job-stats-response]] +[id="{upid}-{api}-response"] ==== Get Job Stats Response -The returned `GetJobStatsResponse` contains the requested job statistics: +The returned +{response}+ contains the requested job statistics: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> `getCount()` indicates the number of jobs statistics found <2> `getJobStats()` is the collection of {ml} `JobStats` objects found \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/get-job.asciidoc b/docs/java-rest/high-level/ml/get-job.asciidoc index 4ecf70e8e65..f2e740897de 100644 --- a/docs/java-rest/high-level/ml/get-job.asciidoc +++ b/docs/java-rest/high-level/ml/get-job.asciidoc @@ -1,57 +1,38 @@ -[[java-rest-high-x-pack-ml-get-job]] +-- +:api: get-job +:request: GetJobRequest +:response: GetJobResponse +-- +[id="{upid}-{api}"] === Get Job API The Get Job API provides the ability to get {ml} jobs in the cluster. -It accepts a `GetJobRequest` object and responds -with a `GetJobResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-get-job-request]] +[id="{upid}-{api}-request"] ==== Get Job Request -A `GetJobRequest` object gets can have any number of `jobId` or `groupName` +A +{request}+ object gets can have any number of `jobId` or `groupName` entries. However, they all must be non-null. An empty list is the same as requesting for all jobs. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing existing `jobIds`, can contain wildcards <2> Whether to ignore if a wildcard expression matches no jobs. 
(This includes `_all` string or when no jobs have been specified) -[[java-rest-high-x-pack-ml-get-job-execution]] -==== Execution - -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. +[id="{upid}-{api}-response"] +==== Get Job Response ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-execute] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- -<1> `getCount()` from the `GetJobResponse` indicates the number of jobs found +<1> `getCount()` from the +{response}+ indicates the number of jobs found <2> `getJobs()` is the collection of {ml} `Job` objects found -[[java-rest-high-x-pack-ml-get-job-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-execute-async] --------------------------------------------------- -<1> The `GetJobRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `GetJobResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/get-overall-buckets.asciidoc b/docs/java-rest/high-level/ml/get-overall-buckets.asciidoc index 1e299dc2721..c3a2f4f250e 100644 --- a/docs/java-rest/high-level/ml/get-overall-buckets.asciidoc +++ b/docs/java-rest/high-level/ml/get-overall-buckets.asciidoc @@ -1,19 +1,24 @@ -[[java-rest-high-x-pack-ml-get-overall-buckets]] +-- +:api: get-overall-buckets +:request: GetOverallBucketsRequest +:response: GetOverallBucketsResponse +-- +[id="{upid}-{api}"] === Get Overall Buckets API The Get Overall Buckets API retrieves overall bucket results that summarize the bucket results of multiple jobs. -It accepts a `GetOverallBucketsRequest` object and responds -with a `GetOverallBucketsResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-get-overall-buckets-request]] +[id="{upid}-{api}-request"] ==== Get Overall Buckets Request -A `GetOverallBucketsRequest` object gets created with one or more `jobId`. +A +{request}+ object gets created with one or more `jobId`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing job IDs `jobId1` and `jobId2`. 
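+
+A hypothetical construction sketch (the job IDs are placeholders; the varargs
+constructor is implied by the callout above):
+
+["source","java"]
+--------------------------------------------------
+// One or more job IDs whose bucket results should be summarized together.
+GetOverallBucketsRequest sketchRequest =
+    new GetOverallBucketsRequest("jobId1", "jobId2");
+--------------------------------------------------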
@@ -22,86 +27,51 @@ The following arguments are optional: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-bucket-span] +include-tagged::{doc-tests-file}[{api}-bucket-span] -------------------------------------------------- <1> The span of the overall buckets. Must be greater than or equal to the jobs' largest `bucket_span`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-end] +include-tagged::{doc-tests-file}[{api}-end] -------------------------------------------------- <1> Overall buckets with timestamps earlier than this time will be returned. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-exclude-interim] +include-tagged::{doc-tests-file}[{api}-exclude-interim] -------------------------------------------------- <1> If `true`, interim results will be excluded. Overall buckets are interim if any of the job buckets within the overall bucket interval are interim. Defaults to `false`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-overall-score] +include-tagged::{doc-tests-file}[{api}-overall-score] -------------------------------------------------- <1> Overall buckets with overall scores greater than or equal to this value will be returned. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-start] +include-tagged::{doc-tests-file}[{api}-start] -------------------------------------------------- <1> Overall buckets with timestamps on or after this time will be returned. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-top-n] +include-tagged::{doc-tests-file}[{api}-top-n] -------------------------------------------------- <1> The number of top job bucket scores to be used in the `overall_score` calculation. Defaults to `1`. -[[java-rest-high-x-pack-ml-get-overall-buckets-execution]] -==== Execution +include::../execution.asciidoc[] -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
- -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-get-overall-buckets-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-execute-async] --------------------------------------------------- -<1> The `GetOverallBucketsRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back with the `onResponse` method -if the execution is successful or the `onFailure` method if the execution -failed. - -A typical listener for `GetBucketsResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-get-overall-buckets-response]] +[id="{upid}-{api}-response"] ==== Get Overall Buckets Response -The returned `GetOverallBucketsResponse` contains the requested buckets: +The returned +{response}+ contains the requested buckets: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The count of overall buckets that were matched <2> The overall buckets retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/get-records.asciidoc b/docs/java-rest/high-level/ml/get-records.asciidoc index 7b8e6b13131..7543463c30b 100644 --- a/docs/java-rest/high-level/ml/get-records.asciidoc +++ b/docs/java-rest/high-level/ml/get-records.asciidoc @@ -1,18 +1,23 @@ -[[java-rest-high-x-pack-ml-get-records]] +-- +:api: get-records +:request: GetRecordsRequest +:response: GetRecordsResponse +-- +[id="{upid}-{api}"] === Get Records API The Get Records API retrieves one or more record results. -It accepts a `GetRecordsRequest` object and responds -with a `GetRecordsResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-get-records-request]] +[id="{upid}-{api}-request"] ==== Get Records Request -A `GetRecordsRequest` object gets created with an existing non-null `jobId`. +A +{request}+ object gets created with an existing non-null `jobId`. 
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing an existing `jobId` @@ -21,93 +26,57 @@ The following arguments are optional: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-desc] +include-tagged::{doc-tests-file}[{api}-desc] -------------------------------------------------- <1> If `true`, the records are sorted in descending order. Defaults to `false`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-end] +include-tagged::{doc-tests-file}[{api}-end] -------------------------------------------------- <1> Records with timestamps earlier than this time will be returned. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-exclude-interim] +include-tagged::{doc-tests-file}[{api}-exclude-interim] -------------------------------------------------- <1> If `true`, interim results will be excluded. Defaults to `false`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-page] +include-tagged::{doc-tests-file}[{api}-page] -------------------------------------------------- <1> The page parameters `from` and `size`. `from` specifies the number of records to skip. `size` specifies the maximum number of records to get. Defaults to `0` and `100` respectively. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-record-score] +include-tagged::{doc-tests-file}[{api}-record-score] -------------------------------------------------- <1> Records with record_score greater or equal than this value will be returned. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-sort] +include-tagged::{doc-tests-file}[{api}-sort] -------------------------------------------------- <1> The field to sort records on. Defaults to `record_score`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-start] +include-tagged::{doc-tests-file}[{api}-start] -------------------------------------------------- <1> Records with timestamps on or after this time will be returned. -[[java-rest-high-x-pack-ml-get-records-execution]] -==== Execution +include::../execution.asciidoc[] -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. 
- -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-execute] --------------------------------------------------- - - -[[java-rest-high-x-pack-ml-get-records-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-execute-async] --------------------------------------------------- -<1> The `GetRecordsRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back with the `onResponse` method -if the execution is successful or the `onFailure` method if the execution -failed. - -A typical listener for `GetRecordsResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-get-records-response]] +[id="{upid}-{api}-response"] ==== Get Records Response -The returned `GetRecordsResponse` contains the requested records: +The returned +{response}+ contains the requested records: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The count of records that were matched <2> The records retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/open-job.asciidoc b/docs/java-rest/high-level/ml/open-job.asciidoc index be6a518df19..9b3ec11a7cc 100644 --- a/docs/java-rest/high-level/ml/open-job.asciidoc +++ b/docs/java-rest/high-level/ml/open-job.asciidoc @@ -1,55 +1,36 @@ -[[java-rest-high-x-pack-ml-open-job]] +-- +:api: open-job +:request: OpenJobRequest +:response: OpenJobResponse +-- +[id="{upid}-{api}"] === Open Job API The Open Job API provides the ability to open {ml} jobs in the cluster. -It accepts a `OpenJobRequest` object and responds -with a `OpenJobResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-open-job-request]] +[id="{upid}-{api}-request"] ==== Open Job Request -An `OpenJobRequest` object gets created with an existing non-null `jobId`. +An +{request}+ object gets created with an existing non-null `jobId`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing an existing `jobId` <2> Optionally setting the `timeout` value for how long the execution should wait for the job to be opened. 
-[[java-rest-high-x-pack-ml-open-job-execution]] -==== Execution - -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. +[id="{upid}-{api}-response"] +==== Open Job Response ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-execute] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- -<1> `isOpened()` from the `OpenJobResponse` indicates if the job was successfully +<1> `isOpened()` from the +{response}+ indicates if the job was successfully opened or not. -[[java-rest-high-x-pack-ml-open-job-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-execute-async] --------------------------------------------------- -<1> The `OpenJobRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `OpenJobResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs +include::../execution.asciidoc[] \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/post-data.asciidoc b/docs/java-rest/high-level/ml/post-data.asciidoc index 2c8ca8f18a3..fd51dc80696 100644 --- a/docs/java-rest/high-level/ml/post-data.asciidoc +++ b/docs/java-rest/high-level/ml/post-data.asciidoc @@ -1,27 +1,32 @@ -[[java-rest-high-x-pack-ml-post-data]] +-- +:api: post-data +:request: PostDataRequest +:response: PostDataResponse +-- +[id="{upid}-{api}"] === Post Data API The Post Data API provides the ability to post data to an open {ml} job in the cluster. -It accepts a `PostDataRequest` object and responds -with a `PostDataResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-post-data-request]] +[id="{upid}-{api}-request"] ==== Post Data Request -A `PostDataRequest` object gets created with an existing non-null `jobId` +A +{request}+ object gets created with an existing non-null `jobId` and the `XContentType` being sent. Individual docs can be added incrementally via the `PostDataRequest.JsonBuilder#addDoc` method. -These are then serialized and sent in bulk when passed to the `PostDataRequest`. +These are then serialized and sent in bulk when passed to the +{request}+. Alternatively, the serialized bulk content can be set manually, along with its `XContentType` -through one of the other `PostDataRequest` constructors. +through one of the other +{request}+ constructors. Only `XContentType.JSON` and `XContentType.SMILE` are supported. 
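+
+A sketch of the incremental `JsonBuilder` flow described above (the field name
+and job ID are hypothetical; only the `PostDataRequest.JsonBuilder#addDoc`
+entry point is taken from this page, the rest is assumed):
+
+["source","java"]
+--------------------------------------------------
+PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
+Map<String, Object> doc = new HashMap<>();
+doc.put("total", 109);   // a hypothetical analyzed field
+jsonBuilder.addDoc(doc); // docs are serialized and sent in bulk
+PostDataRequest sketchRequest = new PostDataRequest("my-job-id", jsonBuilder);
+--------------------------------------------------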
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Create a new `PostDataRequest.JsonBuilder` object for incrementally adding documents <2> Add a new document as a `Map` object @@ -34,53 +39,21 @@ The following arguments are optional. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-request-options] +include-tagged::{doc-tests-file}[{api}-request-options] -------------------------------------------------- <1> Set the start of the bucket resetting time <2> Set the end of the bucket resetting time -[[java-rest-high-x-pack-ml-post-data-execution]] -==== Execution +include::../execution.asciidoc[] -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-post-data-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-execute-async] --------------------------------------------------- -<1> The `PostDataRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `PostDataResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-post-data-response]] +[id="{upid}-{api}-response"] ==== Post Data Response -A `PostDataResponse` contains current data processing statistics. +A +{response}+ contains current data processing statistics. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> `getDataCounts()` a `DataCounts` object containing the current data processing counts. 
diff --git a/docs/java-rest/high-level/ml/preview-datafeed.asciidoc b/docs/java-rest/high-level/ml/preview-datafeed.asciidoc new file mode 100644 index 00000000000..5b812af8344 --- /dev/null +++ b/docs/java-rest/high-level/ml/preview-datafeed.asciidoc @@ -0,0 +1,34 @@ +-- +:api: preview-datafeed +:request: PreviewDatafeedRequest +:response: PreviewDatafeedResponse +-- +[id="{upid}-{api}"] +=== Preview Datafeed API + +The Preview Datafeed API provides the ability to preview a {ml} datafeed's data +in the cluster. It accepts a +{request}+ object and responds +with a +{response}+ object. + +[id="{upid}-{api}-request"] +==== Preview Datafeed Request + +A +{request}+ object is created referencing a non-null `datafeedId`. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request] +-------------------------------------------------- +<1> Constructing a new request referencing an existing `datafeedId` + +[id="{upid}-{api}-response"] +==== Preview Datafeed Response + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- +<1> The raw +BytesReference+ of the data preview +<2> A +List>+ that represents the previewed data + +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/put-calendar.asciidoc b/docs/java-rest/high-level/ml/put-calendar.asciidoc index 5d163f37eb4..defd72e35a0 100644 --- a/docs/java-rest/high-level/ml/put-calendar.asciidoc +++ b/docs/java-rest/high-level/ml/put-calendar.asciidoc @@ -1,65 +1,35 @@ -[[java-rest-high-x-pack-ml-put-calendar]] +-- +:api: put-calendar +:request: PutCalendarRequest +:response: PutCalendarResponse +-- +[id="{upid}-{api}"] === Put Calendar API Creates a new {ml} calendar. -The API accepts a `PutCalendarRequest` and responds -with a `PutCalendarResponse` object. +The API accepts a +{request}+ and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-put-calendar-request]] +[id="{upid}-{api}-request"] ==== Put Calendar Request -A `PutCalendarRequest` is constructed with a Calendar object +A +{request}+ is constructed with a Calendar object ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Create a request with the given Calendar -[[java-rest-high-x-pack-ml-put-calendar-response]] +[id="{upid}-{api}-response"] ==== Put Calendar Response -The returned `PutCalendarResponse` contains the created Calendar: +The returned +{response}+ contains the created Calendar: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The created Calendar -[[java-rest-high-x-pack-ml-put-calendar-execution]] -==== Execution -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. 
- -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-put-calendar-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-execute-async] --------------------------------------------------- -<1> The `PutCalendarResquest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back with the `onResponse` method -if the execution is successful or the `onFailure` method if the execution -failed. - -A typical listener for `PutCalendarResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/put-datafeed.asciidoc b/docs/java-rest/high-level/ml/put-datafeed.asciidoc index 86c9d631726..e9f66f0b61d 100644 --- a/docs/java-rest/high-level/ml/put-datafeed.asciidoc +++ b/docs/java-rest/high-level/ml/put-datafeed.asciidoc @@ -1,22 +1,27 @@ -[[java-rest-high-x-pack-ml-put-datafeed]] +-- +:api: put-datafeed +:request: PutDatafeedRequest +:response: PutDatafeedResponse +-- +[id="{upid}-{api}"] === Put Datafeed API The Put Datafeed API can be used to create a new {ml} datafeed -in the cluster. The API accepts a `PutDatafeedRequest` object -as a request and returns a `PutDatafeedResponse`. +in the cluster. The API accepts a +{request}+ object +as a request and returns a +{response}+. 
-[[java-rest-high-x-pack-ml-put-datafeed-request]] +[id="{upid}-{api}-request"] ==== Put Datafeed Request -A `PutDatafeedRequest` requires the following argument: +A +{request}+ requires the following argument: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> The configuration of the {ml} datafeed to create -[[java-rest-high-x-pack-ml-put-datafeed-config]] +[id="{upid}-{api}-config"] ==== Datafeed Configuration The `DatafeedConfig` object contains all the details about the {ml} datafeed @@ -26,7 +31,7 @@ A `DatafeedConfig` requires the following arguments: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config] +include-tagged::{doc-tests-file}[{api}-config] -------------------------------------------------- <1> The datafeed ID and the job ID <2> The indices that contain the data to retrieve and feed into the job @@ -36,89 +41,52 @@ The following arguments are optional: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-chunking-config] +include-tagged::{doc-tests-file}[{api}-config-set-chunking-config] -------------------------------------------------- <1> Specifies how data searches are split into time chunks. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-frequency] +include-tagged::{doc-tests-file}[{api}-config-set-frequency] -------------------------------------------------- <1> The interval at which scheduled queries are made while the datafeed runs in real time. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-query] +include-tagged::{doc-tests-file}[{api}-config-set-query] -------------------------------------------------- <1> A query to filter the search results by. Defaults to the `match_all` query. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-query-delay] +include-tagged::{doc-tests-file}[{api}-config-set-query-delay] -------------------------------------------------- <1> The time interval behind real time that data is queried. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-script-fields] +include-tagged::{doc-tests-file}[{api}-config-set-script-fields] -------------------------------------------------- <1> Allows the use of script fields. 
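+
+As a sketch of assembling a configuration from the required and optional
+pieces shown above and below (index, datafeed, and job names are hypothetical;
+the builder methods are assumed from the callouts):
+
+["source","java"]
+--------------------------------------------------
+DatafeedConfig.Builder sketchBuilder =
+    new DatafeedConfig.Builder("datafeed-1", "my-job-id");  // datafeed ID and job ID
+sketchBuilder.setIndices(Arrays.asList("my-index"));        // indices that feed the job
+sketchBuilder.setQueryDelay(TimeValue.timeValueMinutes(1)); // query 1m behind real time
+DatafeedConfig sketchConfig = sketchBuilder.build();
+--------------------------------------------------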
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-create-datafeed-config-set-scroll-size] +include-tagged::{doc-tests-file}[{api}-config-set-scroll-size] -------------------------------------------------- <1> The `size` parameter used in the searches. -[[java-rest-high-x-pack-ml-put-datafeed-execution]] -==== Execution +include::../execution.asciidoc[] -The Put Datafeed API can be executed through a `MachineLearningClient` -instance. Such an instance can be retrieved from a `RestHighLevelClient` -using the `machineLearning()` method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-put-datafeed-response]] +[id="{upid}-{api}-response"] ==== Response -The returned `PutDatafeedResponse` returns the full representation of +The returned +{response}+ returns the full representation of the new {ml} datafeed if it has been successfully created. This will contain the creation time and other fields initialized using default values: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The created datafeed - -[[java-rest-high-x-pack-ml-put-datafeed-async]] -==== Asynchronous Execution - -This request can be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-execute-async] --------------------------------------------------- -<1> The `PutDatafeedRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A typical listener for `PutDatafeedResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-datafeed-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument -<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/ml/put-job.asciidoc b/docs/java-rest/high-level/ml/put-job.asciidoc index 8c726d63b16..9934fc6b94a 100644 --- a/docs/java-rest/high-level/ml/put-job.asciidoc +++ b/docs/java-rest/high-level/ml/put-job.asciidoc @@ -1,22 +1,27 @@ -[[java-rest-high-x-pack-ml-put-job]] +-- +:api: put-job +:request: PutJobRequest +:response: PutJobResponse +-- +[id="{upid}-{api}"] === Put Job API The Put Job API can be used to create a new {ml} job -in the cluster. The API accepts a `PutJobRequest` object -as a request and returns a `PutJobResponse`. +in the cluster. The API accepts a +{request}+ object +as a request and returns a +{response}+. 
-[[java-rest-high-x-pack-ml-put-job-request]] +[id="{upid}-{api}-request"] ==== Put Job Request -A `PutJobRequest` requires the following argument: +A +{request}+ requires the following argument: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> The configuration of the {ml} job to create as a `Job` -[[java-rest-high-x-pack-ml-put-job-config]] +[id="{upid}-{api}-config"] ==== Job Configuration The `Job` object contains all the details about the {ml} job @@ -26,14 +31,14 @@ A `Job` requires the following arguments: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-config] +include-tagged::{doc-tests-file}[{api}-config] -------------------------------------------------- <1> The job ID <2> An analysis configuration <3> A data description <4> Optionally, a human-readable description -[[java-rest-high-x-pack-ml-put-job-analysis-config]] +[id="{upid}-{api}-analysis-config"] ==== Analysis Configuration The analysis configuration of the {ml} job is defined in the `AnalysisConfig`. @@ -64,7 +69,7 @@ An example of building a `Detector` instance is as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-detector] +include-tagged::{doc-tests-file}[{api}-detector] -------------------------------------------------- <1> The function to use <2> The field to apply the function to @@ -74,13 +79,13 @@ Then the same configuration would be: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-analysis-config] +include-tagged::{doc-tests-file}[{api}-analysis-config] -------------------------------------------------- <1> Create a list of detectors <2> Pass the list of detectors to the analysis config builder constructor <3> The bucket span -[[java-rest-high-x-pack-ml-put-job-data-description]] +[id="{upid}-{api}-data-description"] ==== Data Description After defining the analysis config, the next thing to define is the @@ -103,59 +108,22 @@ configuration would be: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-data-description] +include-tagged::{doc-tests-file}[{api}-data-description] -------------------------------------------------- <1> The time field -[[java-rest-high-x-pack-ml-put-job-execution]] -==== Execution +include::../execution.asciidoc[] -The Put Job API can be executed through a `MachineLearningClient` -instance. 
Such an instance can be retrieved from a `RestHighLevelClient` -using the `machineLearning()` method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-put-job-response]] +[id="{upid}-{api}-response"] ==== Response -The returned `PutJobResponse` returns the full representation of +The returned +{response}+ returns the full representation of the new {ml} job if it has been successfully created. This will contain the creation time and other fields initialized using default values: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The creation time is a field that was not passed in the `Job` object in the request - -[[java-rest-high-x-pack-ml-put-job-async]] -==== Asynchronous Execution - -This request can be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-async] --------------------------------------------------- -<1> The `PutJobRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A typical listener for `PutJobResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument -<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/ml/start-datafeed.asciidoc b/docs/java-rest/high-level/ml/start-datafeed.asciidoc index 6bef621562e..9c3b096634d 100644 --- a/docs/java-rest/high-level/ml/start-datafeed.asciidoc +++ b/docs/java-rest/high-level/ml/start-datafeed.asciidoc @@ -1,19 +1,24 @@ -[[java-rest-high-x-pack-ml-start-datafeed]] +-- +:api: start-datafeed +:request: StartDatafeedRequest +:response: StartDatafeedResponse +-- +[id="{upid}-{api}"] === Start Datafeed API The Start Datafeed API provides the ability to start a {ml} datafeed in the cluster. -It accepts a `StartDatafeedRequest` object and responds -with a `StartDatafeedResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-start-datafeed-request]] +[id="{upid}-{api}-request"] ==== Start Datafeed Request -A `StartDatafeedRequest` object is created referencing a non-null `datafeedId`. +A +{request}+ object is created referencing a non-null `datafeedId`. All other fields are optional for the request. 
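Before the tagged snippet below, here is a hedged, self-contained sketch of building and executing such a request. It assumes an existing `RestHighLevelClient` named `client`; the datafeed ID and the time values are hypothetical, and the setters correspond to the optional arguments described after the snippet.

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.common.unit.TimeValue;

public final class StartDatafeedSketch {
    static void start(RestHighLevelClient client) throws Exception {
        StartDatafeedRequest request = new StartDatafeedRequest("my-datafeed"); // hypothetical ID
        request.setStart("2018-08-20T00:00:00Z"); // optional: where the analysis begins
        request.setEnd("2018-08-21T00:00:00Z");   // optional: exclusive end time
        request.setTimeout(TimeValue.timeValueSeconds(30)); // optional request timeout
        StartDatafeedResponse response =
            client.machineLearning().startDatafeed(request, RequestOptions.DEFAULT);
        System.out.println("started: " + response.isStarted());
    }
}
--------------------------------------------------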
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing an existing `datafeedId` @@ -23,7 +28,7 @@ The following arguments are optional. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-request-options] +include-tagged::{doc-tests-file}[{api}-request-options] -------------------------------------------------- <1> Set when the datafeed should end, the value is exclusive. May be an epoch seconds, epoch millis or an ISO 8601 string. @@ -35,37 +40,4 @@ If you do not specify a start time and the datafeed is associated with a new job the analysis starts from the earliest time for which data is available. <3> Set the timeout for the request -[[java-rest-high-x-pack-ml-start-datafeed-execution]] -==== Execution - -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-execute] --------------------------------------------------- -<1> Did the datafeed successfully start? - -[[java-rest-high-x-pack-ml-start-datafeed-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-execute-async] --------------------------------------------------- -<1> The `StartDatafeedRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `StartDatafeedResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/ml/stop-datafeed.asciidoc b/docs/java-rest/high-level/ml/stop-datafeed.asciidoc index 4e07d9a2e19..211d1c5ad7a 100644 --- a/docs/java-rest/high-level/ml/stop-datafeed.asciidoc +++ b/docs/java-rest/high-level/ml/stop-datafeed.asciidoc @@ -1,20 +1,25 @@ -[[java-rest-high-x-pack-ml-stop-datafeed]] +-- +:api: stop-datafeed +:request: StopDatafeedRequest +:response: StopDatafeedResponse +-- +[id="{upid}-{api}"] === Stop Datafeed API The Stop Datafeed API provides the ability to stop a {ml} datafeed in the cluster. -It accepts a `StopDatafeedRequest` object and responds -with a `StopDatafeedResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. 
-[[java-rest-high-x-pack-ml-stop-datafeed-request]] +[id="{upid}-{api}-request"] ==== Stop Datafeed Request -A `StopDatafeedRequest` object is created referencing any number of non-null `datafeedId` entries. +A +{request}+ object is created referencing any number of non-null `datafeedId` entries. Wildcards and `_all` are also accepted. All other fields are optional for the request. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing existing `datafeedId` entries. @@ -24,43 +29,10 @@ The following arguments are optional. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-request-options] +include-tagged::{doc-tests-file}[{api}-request-options] -------------------------------------------------- <1> Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string) <2> If true, the datafeed is stopped forcefully. <3> Controls the amount of time to wait until a datafeed stops. The default value is 20 seconds. -[[java-rest-high-x-pack-ml-stop-datafeed-execution]] -==== Execution - -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-execute] --------------------------------------------------- -<1> Did the datafeed successfully stop? - -[[java-rest-high-x-pack-ml-stop-datafeed-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-execute-async] --------------------------------------------------- -<1> The `StopDatafeedRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. A typical `ActionListener` for `StopDatafeedResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs +include::../execution.asciidoc[] \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/update-job.asciidoc b/docs/java-rest/high-level/ml/update-job.asciidoc index 3e1d1e2313b..90f9cf85c48 100644 --- a/docs/java-rest/high-level/ml/update-job.asciidoc +++ b/docs/java-rest/high-level/ml/update-job.asciidoc @@ -1,18 +1,23 @@ -[[java-rest-high-x-pack-ml-update-job]] +-- +:api: update-job +:request: UpdateJobRequest +:response: PutJobResponse +-- +[id="{upid}-{api}"] === Update Job API The Update Job API provides the ability to update a {ml} job. 
-It accepts a `UpdateJobRequest` object and responds -with a `PutJobResponse` object. +It accepts a +{request}+ object and responds +with a +{response}+ object. -[[java-rest-high-x-pack-ml-update-job-request]] +[id="{upid}-{api}-request"] ==== Update Job Request -An `UpdateJobRequest` object gets created with a `JobUpdate` object. +An +{request}+ object gets created with a `JobUpdate` object. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new request referencing a `JobUpdate` object @@ -23,7 +28,7 @@ job. An existing, non-null `jobId` must be referenced in its creation. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-options] +include-tagged::{doc-tests-file}[{api}-options] -------------------------------------------------- <1> Mandatory, non-null `jobId` referencing an existing {ml} job <2> Updated description @@ -41,53 +46,21 @@ include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-op Included with these options are specific optional `JobUpdate.DetectorUpdate` updates. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-detector-options] +include-tagged::{doc-tests-file}[{api}-detector-options] -------------------------------------------------- <1> The index of the detector. `O` means unknown <2> The optional description of the detector <3> The `DetectionRule` rules that apply to this detector -[[java-rest-high-x-pack-ml-update-job-execution]] -==== Execution +include::../execution.asciidoc[] -The request can be executed through the `MachineLearningClient` contained -in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-execute] --------------------------------------------------- - -[[java-rest-high-x-pack-ml-update-job-execution-async]] -==== Asynchronous Execution - -The request can also be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-execute-async] --------------------------------------------------- -<1> The `UpdateJobRequest` to execute and the `ActionListener` to use when -the execution completes - -The method does not block and returns immediately. The passed `ActionListener` is used -to notify the caller of completion. 
A typical `ActionListener` for `PutJobResponse` may -look like - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-listener] --------------------------------------------------- -<1> `onResponse` is called back when the action is completed successfully -<2> `onFailure` is called back when some unexpected error occurs - -[[java-rest-high-x-pack-ml-update-job-response]] +[id="{upid}-{api}-response"] ==== Update Job Response -A `PutJobResponse` contains the updated `Job` object +A +{response}+ contains the updated `Job` object ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> `getResponse()` returns the updated `Job` object diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index bb326cbb9c6..4046a6fced1 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -11,6 +11,7 @@ The Java High Level REST Client supports the following Document APIs: Single document APIs:: * <<{upid}-index>> * <<{upid}-get>> +* <<{upid}-exists>> * <<{upid}-delete>> * <<{upid}-update>> @@ -219,33 +220,37 @@ include::licensing/get-license.asciidoc[] include::licensing/delete-license.asciidoc[] == Machine Learning APIs +:upid: {mainid}-x-pack-ml +:doc-tests-file: {doc-tests}/MlClientDocumentationIT.java The Java High Level REST Client supports the following Machine Learning APIs: -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> +* <<{upid}-put-job>> +* <<{upid}-get-job>> +* <<{upid}-delete-job>> +* <<{upid}-open-job>> +* <<{upid}-close-job>> +* <<{upid}-flush-job>> +* <<{upid}-update-job>> +* <<{upid}-get-job-stats>> +* <<{upid}-put-datafeed>> +* <<{upid}-get-datafeed>> +* <<{upid}-delete-datafeed>> +* <<{upid}-preview-datafeed>> +* <<{upid}-start-datafeed>> +* <<{upid}-stop-datafeed>> +* <<{upid}-get-datafeed-stats>> +* <<{upid}-forecast-job>> +* <<{upid}-delete-forecast>> +* <<{upid}-get-buckets>> +* <<{upid}-get-overall-buckets>> +* <<{upid}-get-records>> +* <<{upid}-post-data>> +* <<{upid}-get-influencers>> +* <<{upid}-get-categories>> +* <<{upid}-get-calendars>> +* <<{upid}-put-calendar>> +* <<{upid}-delete-calendar>> include::ml/put-job.asciidoc[] include::ml/get-job.asciidoc[] @@ -257,8 +262,10 @@ include::ml/flush-job.asciidoc[] include::ml/put-datafeed.asciidoc[] include::ml/get-datafeed.asciidoc[] include::ml/delete-datafeed.asciidoc[] +include::ml/preview-datafeed.asciidoc[] include::ml/start-datafeed.asciidoc[] include::ml/stop-datafeed.asciidoc[] +include::ml/get-datafeed-stats.asciidoc[] include::ml/get-job-stats.asciidoc[] include::ml/forecast-job.asciidoc[] include::ml/delete-forecast.asciidoc[] @@ -306,15 +313,20 @@ include::security/change-password.asciidoc[] == Watcher APIs +:upid: {mainid}-watcher +:doc-tests-file: {doc-tests}/WatcherDocumentationIT.java + The Java High Level REST Client supports the following Watcher APIs: * <> * <> -* <> +* <<{upid}-ack-watch>> +* <<{upid}-activate-watch>> include::watcher/put-watch.asciidoc[] include::watcher/delete-watch.asciidoc[] include::watcher/ack-watch.asciidoc[] 
+include::watcher/activate-watch.asciidoc[] == Graph APIs diff --git a/docs/java-rest/high-level/watcher/ack-watch.asciidoc b/docs/java-rest/high-level/watcher/ack-watch.asciidoc index 13b62ba3be8..46a51679859 100644 --- a/docs/java-rest/high-level/watcher/ack-watch.asciidoc +++ b/docs/java-rest/high-level/watcher/ack-watch.asciidoc @@ -1,7 +1,13 @@ -[[java-rest-high-watcher-ack-watch]] +-- +:api: ack-watch +:request: AckWatchRequest +:response: AckWatchResponse +-- + +[id="{upid}-{api}"] === Ack Watch API -[[java-rest-high-watcher-ack-watch-execution]] +[id="{upid}-{api}-request"] ==== Execution {xpack-ref}/actions.html#actions-ack-throttle[Acknowledging a watch] enables you @@ -10,48 +16,23 @@ through the following request: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> The ID of the watch to ack. <2> An optional list of IDs representing the watch actions that should be acked. If no action IDs are provided, then all of the watch's actions will be acked. -[[java-rest-high-watcher-ack-watch-response]] +[id="{upid}-{api}-response"] ==== Response -The returned `AckWatchResponse` contains the new status of the requested watch: +The returned +{response}+ contains the new status of the requested watch: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The status of a specific action that was acked. <2> The acknowledgement state of the action. If the action was successfully acked, this state will be equal to `AckStatus.State.ACKED`. -[[java-rest-high-watcher-ack-watch-async]] -==== Asynchronous Execution - -This request can be executed asynchronously: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute-async] --------------------------------------------------- -<1> The `AckWatchRequest` to execute and the `ActionListener` to use when -the execution completes. - -The asynchronous method does not block and returns immediately. Once the request -completes, the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A listener for `AckWatchResponse` can be constructed as follows: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument. -<2> Called in case of failure. The raised exception is provided as an argument. 
+include::../execution.asciidoc[]
diff --git a/docs/java-rest/high-level/watcher/activate-watch.asciidoc b/docs/java-rest/high-level/watcher/activate-watch.asciidoc
new file mode 100644
index 00000000000..52124ccb6ed
--- /dev/null
+++ b/docs/java-rest/high-level/watcher/activate-watch.asciidoc
@@ -0,0 +1,56 @@
+--
+:api: activate-watch
+:request: ActivateWatchRequest
+:response: ActivateWatchResponse
+--
+
+[id="{upid}-{api}"]
+=== Activate Watch API
+
+[id="{upid}-{api}-request"]
+==== Execution
+
+A watch can be activated as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-request]
+--------------------------------------------------
+
+[id="{upid}-{api}-response"]
+==== Response
+
+The returned +{response}+ contains the new status of the activated watch.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-response]
+--------------------------------------------------
+<1> `watchStatus` contains the status of the watch
+
+[id="{upid}-{api}-request-async"]
+==== Asynchronous Execution
+
+This request can be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-request-async]
+--------------------------------------------------
+<1> The +{request}+ to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for +{response}+ looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-request-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of failure. The raised exception is provided as an argument
diff --git a/docs/java-rest/low-level/usage.asciidoc b/docs/java-rest/low-level/usage.asciidoc
index 71fadd98988..38104215720 100644
--- a/docs/java-rest/low-level/usage.asciidoc
+++ b/docs/java-rest/low-level/usage.asciidoc
@@ -307,7 +307,7 @@ You can also customize the response consumer used to buffer the asynchronous
responses. The default consumer will buffer up to 100MB of response on the JVM
heap. If the response is larger then the request will fail. You could,
for example, lower the maximum size which might be useful if you are running
-in a heap constrained environment like the exmaple above.
+in a heap constrained environment like the example above.
Once you've created the singleton you can use it when making requests:
diff --git a/docs/painless/painless-api-reference.asciidoc b/docs/painless/painless-api-reference.asciidoc
index 54b1f20977b..814824b0db9 100644
--- a/docs/painless/painless-api-reference.asciidoc
+++ b/docs/painless/painless-api-reference.asciidoc
@@ -3,7 +3,7 @@
Painless has a strict whitelist for methods and classes to ensure all
painless scripts are secure. Most of these methods are exposed directly
-from the Java Runtime Enviroment (JRE) while others are part of
+from the Java Runtime Environment (JRE) while others are part of
Elasticsearch or Painless itself.
Below is a list of all available classes grouped with their respected methods. Clicking on the method name takes you to the documentation for that specific method. Methods diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index ffd5ecebc25..1b975ef761d 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -32,7 +32,7 @@ PUT _snapshot/my_hdfs_repository "type": "hdfs", "settings": { "uri": "hdfs://namenode:8020/", - "path": "elasticsearch/respositories/my_hdfs_repository", + "path": "elasticsearch/repositories/my_hdfs_repository", "conf.dfs.client.read.shortcircuit": "true" } } @@ -149,7 +149,7 @@ PUT _snapshot/my_hdfs_repository "type": "hdfs", "settings": { "uri": "hdfs://namenode:8020/", - "path": "/user/elasticsearch/respositories/my_hdfs_repository", + "path": "/user/elasticsearch/repositories/my_hdfs_repository", "security.principal": "elasticsearch@REALM" } } @@ -167,7 +167,7 @@ PUT _snapshot/my_hdfs_repository "type": "hdfs", "settings": { "uri": "hdfs://namenode:8020/", - "path": "/user/elasticsearch/respositories/my_hdfs_repository", + "path": "/user/elasticsearch/repositories/my_hdfs_repository", "security.principal": "elasticsearch/_HOST@REALM" } } @@ -186,4 +186,4 @@ extracts for file access checks will be `elasticsearch`. NOTE: The repository plugin makes no assumptions of what Elasticsearch's principal name is. The main fragment of the Kerberos principal is not required to be `elasticsearch`. If you have a principal or service name that works better -for you or your organization then feel free to use it instead! \ No newline at end of file +for you or your organization then feel free to use it instead! diff --git a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc index 0726f5f927e..20148e07eb7 100644 --- a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc @@ -1,8 +1,6 @@ [[search-aggregations-bucket-composite-aggregation]] === Composite Aggregation -beta[] - A multi-bucket aggregation that creates composite buckets from different sources. Unlike the other `multi-bucket` aggregation the `composite` aggregation can be used diff --git a/docs/reference/aggregations/bucket/significanttext-aggregation.asciidoc b/docs/reference/aggregations/bucket/significanttext-aggregation.asciidoc index fa4d94c2327..47bc00e3d8c 100644 --- a/docs/reference/aggregations/bucket/significanttext-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/significanttext-aggregation.asciidoc @@ -89,7 +89,7 @@ Response: } } -------------------------------------------------- -// NOTCONSOLE +// TESTRESPONSE[skip:historically skipped] The results show that "h5n1" is one of several terms strongly associated with bird flu. It only occurs 5 times in our index as a whole (see the `bg_count`) and yet 4 of these diff --git a/docs/reference/analysis/normalizers.asciidoc b/docs/reference/analysis/normalizers.asciidoc index e4bd710900c..1d4d6e74213 100644 --- a/docs/reference/analysis/normalizers.asciidoc +++ b/docs/reference/analysis/normalizers.asciidoc @@ -1,8 +1,6 @@ [[analysis-normalizers]] == Normalizers -beta[] - Normalizers are similar to analyzers except that they may only emit a single token. As a consequence, they do not have a tokenizer and only accept a subset of the available char filters and token filters. 
Only the filters that work on diff --git a/docs/reference/cluster/tasks.asciidoc b/docs/reference/cluster/tasks.asciidoc index c84f4c43ae8..b429edcd9de 100644 --- a/docs/reference/cluster/tasks.asciidoc +++ b/docs/reference/cluster/tasks.asciidoc @@ -132,6 +132,19 @@ number of requests and the destination indices. Many requests will only have an empty description because more detailed information about the request is not easily available or particularly helpful in identifying the request. +[IMPORTANT] +============================== + +`_tasks` requests with `detailed` may also return a `status`. This is a report +of the internal status of the task. As such its format varies from task to task. +While we try to keep the `status` for a particular task consistent from version +to version this isn't always possible because we sometimes change the +implementation. In that case we might remove fields from the `status` for a +particular request so any parsing you do of the status might break in minor +releases. + +============================== + The task API can also be used to wait for completion of a particular task. The following call will block for 10 seconds or until the task with id `oTUltX4IQMOUUVeiohTt8A:12345` is completed. diff --git a/docs/reference/commands/certutil.asciidoc b/docs/reference/commands/certutil.asciidoc index e0c6c701e31..4b04f95445e 100644 --- a/docs/reference/commands/certutil.asciidoc +++ b/docs/reference/commands/certutil.asciidoc @@ -72,7 +72,7 @@ parameter or in the `filename` field in an input YAML file. You can optionally provide IP addresses or DNS names for each instance. If neither IP addresses nor DNS names are specified, the Elastic stack products cannot perform hostname verification and you might need to configure the -`verfication_mode` security setting to `certificate` only. For more information +`verification_mode` security setting to `certificate` only. For more information about this setting, see <>. All certificates that are generated by this command are signed by a CA. You can diff --git a/docs/reference/index-modules/index-sorting.asciidoc b/docs/reference/index-modules/index-sorting.asciidoc index 9d1dfcb1a75..30c28165af8 100644 --- a/docs/reference/index-modules/index-sorting.asciidoc +++ b/docs/reference/index-modules/index-sorting.asciidoc @@ -1,8 +1,6 @@ [[index-modules-index-sorting]] == Index Sorting -beta[] - When creating a new index in Elasticsearch it is possible to configure how the Segments inside each Shard will be sorted. By default Lucene does not apply any sort. The `index.sort.*` settings define which fields should be used to sort the documents inside each Segment. diff --git a/docs/reference/migration/migrate_7_0/scripting.asciidoc b/docs/reference/migration/migrate_7_0/scripting.asciidoc index de312c1c723..01d8805c896 100644 --- a/docs/reference/migration/migrate_7_0/scripting.asciidoc +++ b/docs/reference/migration/migrate_7_0/scripting.asciidoc @@ -14,6 +14,15 @@ now been removed. Instead, use `.value` on `date` fields, or explicitly parse `long` fields into a date object using `Instance.ofEpochMillis(doc["myfield"].value)`. +[float] +==== Accessing missing document values will throw an error +`doc['field'].value` will throw an exception if +the document is missing a value for the field `field`. + +To check if a document is missing a value, you can use +`doc['field'].size() == 0`. 
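To make the guard concrete, here is a small hedged sketch (not taken from the Elasticsearch sources) that embeds the null-safe pattern in a script field; the field name `price` and the `0` fallback are purely illustrative.

["source","java"]
--------------------------------------------------
import org.elasticsearch.script.Script;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public final class MissingValueGuard {
    // Builds a search source whose script field checks size() before reading
    // .value, avoiding the exception thrown for documents missing the field.
    static SearchSourceBuilder safeSource() {
        return new SearchSourceBuilder()
            .scriptField("safe_price",
                new Script("doc['price'].size() == 0 ? 0 : doc['price'].value"));
    }
}
--------------------------------------------------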
+ + [float] ==== Script errors will return as `400` error codes diff --git a/docs/reference/ml/apis/delete-job.asciidoc b/docs/reference/ml/apis/delete-job.asciidoc index d5ef120ad04..b9dbe9e3cd6 100644 --- a/docs/reference/ml/apis/delete-job.asciidoc +++ b/docs/reference/ml/apis/delete-job.asciidoc @@ -41,6 +41,9 @@ separated list. (boolean) Use to forcefully delete an opened job; this method is quicker than closing and deleting the job. +`wait_for_completion`:: + (boolean) Specifies whether the request should return immediately or wait + until the job deletion completes. Defaults to `true`. ==== Authorization @@ -66,4 +69,23 @@ When the job is deleted, you receive the following results: "acknowledged": true } ---- -// TESTRESPONSE \ No newline at end of file +// TESTRESPONSE + +In the next example we delete the `total-requests` job asynchronously: + +[source,js] +-------------------------------------------------- +DELETE _xpack/ml/anomaly_detectors/total-requests?wait_for_completion=false +-------------------------------------------------- +// CONSOLE +// TEST[skip:setup:server_metrics_job] + +When `wait_for_completion` is set to `false`, the response contains the id +of the job deletion task: +[source,js] +---- +{ + "task": "oTUltX4IQMOUUVeiohTt8A:39" +} +---- +// TESTRESPONSE[s/"task": "oTUltX4IQMOUUVeiohTt8A:39"/"task": $body.task/] \ No newline at end of file diff --git a/docs/reference/ml/apis/find-file-structure.asciidoc b/docs/reference/ml/apis/find-file-structure.asciidoc index e72555d2723..5bd32750685 100644 --- a/docs/reference/ml/apis/find-file-structure.asciidoc +++ b/docs/reference/ml/apis/find-file-structure.asciidoc @@ -613,6 +613,20 @@ If the request does not encounter errors, you receive the following result: "type" : "double" } }, + "ingest_pipeline" : { + "description" : "Ingest pipeline created by file structure finder", + "processors" : [ + { + "date" : { + "field" : "tpep_pickup_datetime", + "timezone" : "{{ beat.timezone }}", + "formats" : [ + "YYYY-MM-dd HH:mm:ss" + ] + } + } + ] + }, "field_stats" : { "DOLocationID" : { "count" : 19998, @@ -1366,6 +1380,33 @@ this: "type" : "text" } }, + "ingest_pipeline" : { + "description" : "Ingest pipeline created by file structure finder", + "processors" : [ + { + "grok" : { + "field" : "message", + "patterns" : [ + "\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel}.*" + ] + } + }, + { + "date" : { + "field" : "timestamp", + "timezone" : "{{ beat.timezone }}", + "formats" : [ + "ISO8601" + ] + } + }, + { + "remove" : { + "field" : "timestamp" + } + } + ] + }, "field_stats" : { "loglevel" : { "count" : 53, @@ -1499,6 +1540,33 @@ this: "type" : "keyword" } }, + "ingest_pipeline" : { + "description" : "Ingest pipeline created by file structure finder", + "processors" : [ + { + "grok" : { + "field" : "message", + "patterns" : [ + "\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel} *\\]\\[%{JAVACLASS:class} *\\] \\[%{HOSTNAME:node}\\] %{JAVALOGMESSAGE:message}" + ] + } + }, + { + "date" : { + "field" : "timestamp", + "timezone" : "{{ beat.timezone }}", + "formats" : [ + "ISO8601" + ] + } + }, + { + "remove" : { + "field" : "timestamp" + } + } + ] + }, "field_stats" : { <2> "class" : { "count" : 53, diff --git a/docs/reference/ml/apis/jobcounts.asciidoc b/docs/reference/ml/apis/jobcounts.asciidoc index d0169e228d5..e6af7ac569c 100644 --- a/docs/reference/ml/apis/jobcounts.asciidoc +++ b/docs/reference/ml/apis/jobcounts.asciidoc @@ -207,7 +207,7 @@ The `forecasts_stats` object shows statistics about forecasts. 
It has the follow (object) Counts per forecast status, for example: {"finished" : 2}. NOTE: `memory_bytes`, `records`, `processing_time_ms` and `status` require at least 1 forecast, otherwise -these fields are ommitted. +these fields are omitted. [float] [[ml-stats-node]] diff --git a/docs/reference/ml/apis/resultsresource.asciidoc b/docs/reference/ml/apis/resultsresource.asciidoc index d3abd094be7..9aac36fc87a 100644 --- a/docs/reference/ml/apis/resultsresource.asciidoc +++ b/docs/reference/ml/apis/resultsresource.asciidoc @@ -364,6 +364,11 @@ A record object has the following properties: //In scientific notation, a value of 3.24E-300 is highly unlikely and therefore //highly anomalous. +`multi_bucket_impact`:: + (number) an indication of how strongly an anomaly is multi bucket or single bucket. + The value is on a scale of -5 to +5 where -5 means the anomaly is purely single + bucket and +5 means the anomaly is purely multi bucket. + `record_score`:: (number) A normalized score between 0-100, which is based on the probability of the anomalousness of this record. Unlike `initial_record_score`, this diff --git a/docs/reference/query-dsl/geo-queries.asciidoc b/docs/reference/query-dsl/geo-queries.asciidoc index e60e4bcf615..5220b00101e 100644 --- a/docs/reference/query-dsl/geo-queries.asciidoc +++ b/docs/reference/query-dsl/geo-queries.asciidoc @@ -4,13 +4,13 @@ Elasticsearch supports two types of geo data: <> fields which support lat/lon pairs, and <> fields, which support points, -lines, circles, polygons, multi-polygons etc. +lines, circles, polygons, multi-polygons, etc. The queries in this group are: <> query:: - Find document with geo-shapes which either intersect, are contained by, or + Finds documents with geo-shapes which either intersect, are contained by, or do not intersect with the specified geo-shape. <> query:: @@ -19,7 +19,7 @@ The queries in this group are: <> query:: - Finds document with geo-points within the specified distance of a central + Finds documents with geo-points within the specified distance of a central point. <> query:: diff --git a/docs/reference/query-dsl/terms-set-query.asciidoc b/docs/reference/query-dsl/terms-set-query.asciidoc index 29b349c3b7a..4067a1e847e 100644 --- a/docs/reference/query-dsl/terms-set-query.asciidoc +++ b/docs/reference/query-dsl/terms-set-query.asciidoc @@ -1,8 +1,6 @@ [[query-dsl-terms-set-query]] === Terms Set Query -experimental[The terms_set query is a new query and its syntax may change in the future] - Returns any documents that match with at least one or more of the provided terms. The terms are not analyzed and thus must match exactly. The number of terms that must match varies per document and is either diff --git a/docs/reference/rollup/apis/rollup-caps.asciidoc b/docs/reference/rollup/apis/rollup-caps.asciidoc index 907efb94c17..274037cae8f 100644 --- a/docs/reference/rollup/apis/rollup-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-caps.asciidoc @@ -45,7 +45,7 @@ For more information, see ==== Examples Imagine we have an index named `sensor-1` full of raw data. We know that the data will grow over time, so there -will be a `sensor-2`, `sensor-3`, etc. Let's create a Rollup job that targets the index pattern `sensor-*` to accomodate +will be a `sensor-2`, `sensor-3`, etc. 
Let's create a Rollup job that targets the index pattern `sensor-*` to accommodate
this future scaling:

[source,js]
diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc
index 544bea86b0d..1875c402ada 100644
--- a/docs/reference/search/request/sort.asciidoc
+++ b/docs/reference/search/request/sort.asciidoc
@@ -127,6 +127,9 @@ field support has a `nested` sort option with the following properties:
should match with in order for its field values to be taken into account
by sorting. Common case is to repeat the query / filter inside the
nested filter or query. By default no `nested_filter` is active.
+`max_children`::
+ The maximum number of children to consider per root document
+ when picking the sort value. Defaults to unlimited.
`nested`::
Same as top-level `nested` but applies to another nested path within the
current nested object.
diff --git a/docs/reference/settings/notification-settings.asciidoc b/docs/reference/settings/notification-settings.asciidoc
index ec9b8e31af2..6773484e07f 100644
--- a/docs/reference/settings/notification-settings.asciidoc
+++ b/docs/reference/settings/notification-settings.asciidoc
@@ -120,6 +120,13 @@ can specify the following email account attributes:
If `true`, then `STARTTLS` will be required. If that command fails, the
connection will fail. Defaults to `false`.
+ `smtp.ssl.trust`;;
+ A list of SMTP server hosts that are assumed trusted and for which
+ certificate verification is disabled. If set to "*", all hosts are
+ trusted. If set to a whitespace-separated list of hosts, those hosts
+ are trusted. Otherwise, trust depends on the certificate the server
+ presents.
+
`smtp.timeout`;;
The socket read timeout. Default is two minutes.
diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc
index 2176d0d3ee6..8d5c832adcc 100644
--- a/docs/reference/settings/security-settings.asciidoc
+++ b/docs/reference/settings/security-settings.asciidoc
@@ -55,6 +55,7 @@ Enables fips mode of operation. Set this to `true` if you run this {es} instance
`xpack.security.authc.accept_default_password`::
In `elasticsearch.yml`, set this to `false` to disable support for the default "changeme" password.
+[float]
[[password-hashing-settings]]
==== Password hashing settings
`xpack.security.authc.password_hashing.algorithm`::
@@ -82,6 +83,33 @@ resource. When set to `false`, an HTTP 401 response is returned and the user
can provide credentials with the appropriate permissions to gain access.
Defaults to `true`.
+[float]
+[[security-automata-settings]]
+==== Automata Settings
+In places where {security} accepts wildcard patterns (e.g. index patterns in
+roles, group matches in the role mapping API), each pattern is compiled into
+an Automaton. The following settings are available to control this behaviour.
+
+`xpack.security.automata.max_determinized_states`::
+The upper limit on how many automaton states may be created by a single pattern.
+This protects against too-difficult (e.g. exponentially hard) patterns.
+Defaults to `100,000`.
+
+`xpack.security.automata.cache.enabled`::
+Whether to cache the compiled automata. Compiling automata can be CPU intensive
+and may slow down some operations. The cache reduces the frequency with which
+automata need to be compiled.
+Defaults to `true`.
+
+`xpack.security.automata.cache.size`::
+The maximum number of items to retain in the automata cache.
+Defaults to `10,000`.
+
+`xpack.security.automata.cache.ttl`::
+The length of time to retain an item in the automata cache (based on most
+recent usage).
+Defaults to `48h` (48 hours).
+
[float]
[[field-document-security-settings]]
==== Document and field level security settings
@@ -176,6 +204,11 @@ cache at any given time. Defaults to 100,000.
in-memory cached user credentials. For possible values, see <>.
Defaults to `ssha256`.
+`authentication.enabled`:: If set to `false`, disables authentication support in
+this realm, so that it only supports user lookups.
+(See the {xpack-ref}/run-as-privilege.html[run as] and
+{stack-ov}/realm-chains.html#authorization_realms[authorization realms] features).
+Defaults to `true`.

[[ref-users-settings]]
@@ -200,6 +233,12 @@ Defaults to 100,000.
(Expert Setting) The hashing algorithm that is used for the in-memory cached
user credentials. See <>. Defaults to `ssha256`.
+`authentication.enabled`:: If set to `false`, disables authentication support in
+this realm, so that it only supports user lookups.
+(See the {xpack-ref}/run-as-privilege.html[run as] and
+{stack-ov}/realm-chains.html#authorization_realms[authorization realms] features).
+Defaults to `true`.
+
[[ref-ldap-settings]]
[float]
===== LDAP realm settings
@@ -400,8 +439,9 @@ The path to the Java Keystore file that contains a private key and certificate.
`ssl.key` and `ssl.keystore.path` may not be used at the same time.
`ssl.keystore.type`::
-The format of the keystore file. Should be either `jks` to use the Java
-Keystore format, or `PKCS12` to use PKCS#12 files. The default is `jks`.
+The format of the keystore file. Should be `jks` to use the Java
+Keystore format, `PKCS12` to use PKCS#12 files, or `PKCS11` to use a PKCS#11 token.
+The default is `jks`.
`ssl.keystore.password`::
The password to the keystore.
@@ -426,8 +466,9 @@ The password to the truststore.
The password to the truststore.
`ssl.truststore.type`::
-The format of the keystore file. Should be either `jks` to use the Java
-Keystore format, or `PKCS12` to use PKCS#12 files. The default is `jks`.
+The format of the truststore file. Should be `jks` to use the Java
+Keystore format, `PKCS12` to use PKCS#12 files, or `PKCS11` to use a PKCS#11 token.
+The default is `jks`.
`ssl.verification_mode`::
Indicates the type of verification when using `ldaps` to protect against man
@@ -460,6 +501,12 @@ Defaults to `100000`.
(Expert Setting) Specifies the hashing algorithm that is used for
the in-memory cached user credentials. See <>. Defaults to `ssha256`.
+`authentication.enabled`:: If set to `false`, disables authentication support in
+this realm, so that it only supports user lookups.
+(See the {xpack-ref}/run-as-privilege.html[run as] and
+{stack-ov}/realm-chains.html#authorization_realms[authorization realms] features).
+Defaults to `true`.
+
[[ref-ad-settings]]
[float]
===== Active Directory realm settings
@@ -649,8 +696,9 @@ The path to the Java Keystore file that contains a private key and certificate.
`ssl.key` and `ssl.keystore.path` cannot be used at the same time.
`ssl.keystore.type`::
-The format of the keystore file. Should be either `jks` to use the Java
-Keystore format, or `PKCS12` to use PKCS#12 files. The default is `jks`.
+The format of the keystore file. Should be `jks` to use the Java
+Keystore format, `PKCS12` to use PKCS#12 files, or `PKCS11` to use a PKCS#11 token.
+The default is `jks`.
`ssl.truststore.password`::
The password to the truststore.
@@ -664,8 +712,9 @@ The path to the Java Keystore file that contains the certificates to trust. same time.
`ssl.truststore.type`::
-The format of the truststore file. Should be either `jks` to use the Java
-Keystore format, or `PKCS12` to use PKCS#12 files. The default is `jks`.
+The format of the truststore file. Should be `jks` to use the Java
+Keystore format, `PKCS12` to use PKCS#12 files, or `PKCS11` to use a PKCS#11 token.
+The default is `jks`.
`ssl.verification_mode`::
Indicates the type of verification when using `ldaps` to protect against man
@@ -699,6 +748,12 @@ Defaults to `100000`.
(Expert Setting) Specifies the hashing algorithm that is used for
the in-memory cached user credentials. See <>. Defaults to `ssha256`.
+`authentication.enabled`:: If set to `false`, disables authentication support in
+this realm, so that it only supports user lookups.
+(See the {xpack-ref}/run-as-privilege.html[run as] and
+{stack-ov}/realm-chains.html#authorization_realms[authorization realms] features).
+Defaults to `true`.
+
`follow_referrals`::
If set to `true` {security} follows referrals returned by the LDAP server.
Referrals are URLs returned by the server that are to be used to continue the
@@ -1062,7 +1117,7 @@ Must be either a Java Keystore (jks) or a PKCS#12 file. same time.
`ssl.truststore.type`::
-The type of the truststore (`ssl.truststore.path`). Must be either `jks` or
+The type of the truststore (`ssl.truststore.path`). Must be either `jks` or
`PKCS12`. If the keystore path ends in ".p12", ".pfx" or "pkcs12", this
setting defaults to `PKCS12`. Otherwise, it defaults to `jks`.
@@ -1316,6 +1371,33 @@ a PKCS#12 container includes trusted certificate ("anchor") entries look for
`openssl pkcs12 -info` output, or `trustedCertEntry` in the
`keytool -list` output.
+[float]
+===== PKCS#11 tokens
+
+When using a PKCS#11 cryptographic token, which contains the
+private key, certificate, and certificates that should be trusted, use
+the following settings:
+
+`xpack.ssl.keystore.type`::
+Set this to `PKCS11`.
+
+`xpack.ssl.truststore.type`::
+Set this to `PKCS11`.
+
+
+[[pkcs11-truststore-note]]
+[NOTE]
+When configuring the PKCS#11 token that your JVM is configured to use as
+a keystore or a truststore for Elasticsearch, the PIN for the token can be
+configured by setting the appropriate value to `xpack.ssl.truststore.password`
+or `xpack.ssl.truststore.secure_password`. In the absence of the above, {es} will
+fall back to using the appropriate JVM setting (`-Djavax.net.ssl.trustStorePassword`)
+if that is set.
+Since there can only be one PKCS#11 token configured, only one keystore and
+truststore will be usable for configuration in {es}. This in turn means
+that only one certificate can be used for TLS in both the transport and the
+http layer.
+
[[http-tls-ssl-settings]]
:ssl-prefix: xpack.security.http
:component: HTTP
diff --git a/docs/reference/settings/ssl-settings.asciidoc b/docs/reference/settings/ssl-settings.asciidoc
index 12729abf4ab..2d513c74237 100644
--- a/docs/reference/settings/ssl-settings.asciidoc
+++ b/docs/reference/settings/ssl-settings.asciidoc
@@ -145,3 +145,17 @@ Password to the PKCS#12 file.
+{ssl-prefix}.ssl.truststore.secure_password+ (<>)::
Password to the PKCS#12 file.
+
+===== PKCS#11 Tokens
+
+{security} can be configured to use a PKCS#11 token that contains the private key,
+certificate and certificates that should be trusted.
+
+PKCS#11 tokens require additional configuration at the JVM level and can be enabled
+via the following settings:
+
++{ssl-prefix}.keystore.type+::
+Set this to `PKCS11` to indicate that the PKCS#11 token should be used as a keystore.
+
++{ssl-prefix}.truststore.type+::
+Set this to `PKCS11` to indicate that the PKCS#11 token should be used as a truststore.
\ No newline at end of file
diff --git a/docs/reference/sql/concepts.asciidoc b/docs/reference/sql/concepts.asciidoc
index dab33618762..aceea1949a2 100644
--- a/docs/reference/sql/concepts.asciidoc
+++ b/docs/reference/sql/concepts.asciidoc
@@ -62,4 +62,4 @@ Multiple clusters, each with its own namespace, connected to each other in a fed
|===
-As one can see while the mapping between the concepts are not exactly one to one and the semantics somewhat different, there are more things in common than differences. In fact, thanks to SQL declarative nature, many concepts can move across {es} transparently and the terminology of the two likely to be used interchangeably through-out the rest of the material. \ No newline at end of file
+As one can see while the mapping between the concepts are not exactly one to one and the semantics somewhat different, there are more things in common than differences. In fact, thanks to SQL declarative nature, many concepts can move across {es} transparently and the terminology of the two likely to be used interchangeably through-out the rest of the material.
diff --git a/docs/reference/sql/endpoints/jdbc.asciidoc b/docs/reference/sql/endpoints/jdbc.asciidoc
index a8a866ac93c..98589043f61 100644
--- a/docs/reference/sql/endpoints/jdbc.asciidoc
+++ b/docs/reference/sql/endpoints/jdbc.asciidoc
@@ -44,7 +44,7 @@ from `artifacts.elastic.co/maven` by adding it to the repositories list:
=== Setup
The driver main class is `org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver`.
-Note the driver implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registerd automatically
+Note the driver implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registered automatically
as long as its available in the classpath.
Once registered, the driver understands the following syntax as an URL:
@@ -182,4 +182,4 @@ connection. For example:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{jdbc-tests}/SimpleExampleTestCase.java[simple_example]
--------------------------------------------------- \ No newline at end of file
+--------------------------------------------------
diff --git a/docs/reference/sql/functions/math.asciidoc b/docs/reference/sql/functions/math.asciidoc
index e84a71b1e91..975feac7429 100644
--- a/docs/reference/sql/functions/math.asciidoc
+++ b/docs/reference/sql/functions/math.asciidoc
@@ -276,6 +276,30 @@ include-tagged::{sql-specs}/docs.csv-spec[mathInlinePowerPositive]
include-tagged::{sql-specs}/docs.csv-spec[mathInlinePowerNegative]
--------------------------------------------------
+[[sql-functions-math-random]]
+===== `RANDOM`
+
+.Synopsis:
+[source, sql]
+--------------------------------------------------
+RANDOM(seed<1>)
+--------------------------------------------------
+
+*Input*:
+
+<1> numeric expression
+
+*Output*: double numeric value
+
+.Description:
+
+Returns a random double using the given seed.
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathRandom] +-------------------------------------------------- + [[sql-functions-math-round]] ===== `ROUND` diff --git a/docs/reference/sql/functions/string.asciidoc b/docs/reference/sql/functions/string.asciidoc index 873ac18d812..04b2937e6f9 100644 --- a/docs/reference/sql/functions/string.asciidoc +++ b/docs/reference/sql/functions/string.asciidoc @@ -271,6 +271,29 @@ Returns the characters of `string_exp`, with leading blanks removed. include-tagged::{sql-specs}/docs.csv-spec[stringLTrim] -------------------------------------------------- +[[sql-functions-string-octet-length]] +==== `OCTET_LENGTH` + +.Synopsis: +[source, sql] +-------------------------------------------------- +OCTET_LENGTH(string_exp<1>) +-------------------------------------------------- +*Input*: + +<1> string expression + +*Output*: integer + +.Description: + +Returns the length in bytes of the `string_exp` input expression. + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[stringOctetLength] +-------------------------------------------------- + [[sql-functions-string-position]] ==== `POSITION` diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java index 5c769774f61..2d50f160812 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java @@ -31,12 +31,12 @@ import org.elasticsearch.search.MultiValueMode; final class DateField { // no instance private DateField() {} - + // supported variables static final String VALUE_VARIABLE = "value"; static final String EMPTY_VARIABLE = "empty"; static final String LENGTH_VARIABLE = "length"; - + // supported methods static final String GETVALUE_METHOD = "getValue"; static final String ISEMPTY_METHOD = "isEmpty"; @@ -47,7 +47,7 @@ final class DateField { static final String MEDIAN_METHOD = "median"; static final String SUM_METHOD = "sum"; static final String COUNT_METHOD = "count"; - + // date-specific static final String GET_YEAR_METHOD = "getYear"; static final String GET_MONTH_METHOD = "getMonth"; @@ -55,7 +55,7 @@ final class DateField { static final String GET_HOUR_OF_DAY_METHOD = "getHourOfDay"; static final String GET_MINUTES_METHOD = "getMinutes"; static final String GET_SECONDS_METHOD = "getSeconds"; - + static ValueSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { switch (variable) { case VALUE_VARIABLE: @@ -68,7 +68,7 @@ final class DateField { throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for date field [" + fieldName + "]."); } } - + static ValueSource getMethod(IndexFieldData fieldData, String fieldName, String method) { switch (method) { case GETVALUE_METHOD: diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index 9d305a5f2d9..7330bd6ea5d 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ 
b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -37,7 +37,6 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.BucketAggregationSelectorScript; import org.elasticsearch.script.ClassPermission; @@ -47,6 +46,7 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; +import org.elasticsearch.script.TermsSetQueryScript; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; @@ -128,6 +128,9 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE } else if (context.instanceClazz.equals(ScoreScript.class)) { ScoreScript.Factory factory = (p, lookup) -> newScoreScript(expr, lookup, p); return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(TermsSetQueryScript.class)) { + TermsSetQueryScript.Factory factory = (p, lookup) -> newTermsSetQueryScript(expr, lookup, p); + return context.factoryClazz.cast(factory); } throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]"); } @@ -164,7 +167,6 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE } private SearchScript.LeafFactory newSearchScript(Expression expr, SearchLookup lookup, @Nullable Map vars) { - MapperService mapper = lookup.doc().mapperService(); // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, // instead of complicating SimpleBindings (which should stay simple) SimpleBindings bindings = new SimpleBindings(); @@ -183,100 +185,11 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE // way to know this is for aggregations and so _value is ok to have... } else if (vars != null && vars.containsKey(variable)) { - // TODO: document and/or error if vars contains _score? - // NOTE: by checking for the variable in vars first, it allows masking document fields with a global constant, - // but if we were to reverse it, we could provide a way to supply dynamic defaults for documents missing the field? - Object value = vars.get(variable); - if (value instanceof Number) { - bindings.add(variable, new DoubleConstValueSource(((Number) value).doubleValue()).asDoubleValuesSource()); - } else { - throw new ParseException("Parameter [" + variable + "] must be a numeric type", 0); - } - + bindFromParams(vars, bindings, variable); } else { - String fieldname = null; - String methodname = null; - String variablename = "value"; // .value is the default for doc['field'], its optional. 
- boolean dateAccessor = false; // true if the variable is of type doc['field'].date.xxx - VariableContext[] parts = VariableContext.parse(variable); - if (parts[0].text.equals("doc") == false) { - throw new ParseException("Unknown variable [" + parts[0].text + "]", 0); - } - if (parts.length < 2 || parts[1].type != VariableContext.Type.STR_INDEX) { - throw new ParseException("Variable 'doc' must be used with a specific field like: doc['myfield']", 3); - } else { - fieldname = parts[1].text; - } - if (parts.length == 3) { - if (parts[2].type == VariableContext.Type.METHOD) { - methodname = parts[2].text; - } else if (parts[2].type == VariableContext.Type.MEMBER) { - variablename = parts[2].text; - } else { - throw new IllegalArgumentException("Only member variables or member methods may be accessed on a field when not accessing the field directly"); - } - } - if (parts.length > 3) { - // access to the .date "object" within the field - if (parts.length == 4 && ("date".equals(parts[2].text) || "getDate".equals(parts[2].text))) { - if (parts[3].type == VariableContext.Type.METHOD) { - methodname = parts[3].text; - dateAccessor = true; - } else if (parts[3].type == VariableContext.Type.MEMBER) { - variablename = parts[3].text; - dateAccessor = true; - } - } - if (!dateAccessor) { - throw new IllegalArgumentException("Variable [" + variable + "] does not follow an allowed format of either doc['field'] or doc['field'].method()"); - } - } - - MappedFieldType fieldType = mapper.fullName(fieldname); - - if (fieldType == null) { - throw new ParseException("Field [" + fieldname + "] does not exist in mappings", 5); - } - - IndexFieldData fieldData = lookup.doc().getForField(fieldType); - // delegate valuesource creation based on field's type // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods. 
- - final ValueSource valueSource; - if (fieldType instanceof GeoPointFieldType) { - // geo - if (methodname == null) { - valueSource = GeoField.getVariable(fieldData, fieldname, variablename); - } else { - valueSource = GeoField.getMethod(fieldData, fieldname, methodname); - } - } else if (fieldType instanceof DateFieldMapper.DateFieldType) { - if (dateAccessor) { - // date object - if (methodname == null) { - valueSource = DateObject.getVariable(fieldData, fieldname, variablename); - } else { - valueSource = DateObject.getMethod(fieldData, fieldname, methodname); - } - } else { - // date field itself - if (methodname == null) { - valueSource = DateField.getVariable(fieldData, fieldname, variablename); - } else { - valueSource = DateField.getMethod(fieldData, fieldname, methodname); - } - } - } else if (fieldData instanceof IndexNumericFieldData) { - // number - if (methodname == null) { - valueSource = NumericField.getVariable(fieldData, fieldname, variablename); - } else { - valueSource = NumericField.getMethod(fieldData, fieldname, methodname); - } - } else { - throw new ParseException("Field [" + fieldname + "] must be numeric, date, or geopoint", 5); - } + final ValueSource valueSource = getDocValueSource(variable, lookup); needsScores |= valueSource.getSortField(false).needsScores(); bindings.add(variable, valueSource.asDoubleValuesSource()); } @@ -288,6 +201,30 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE return new ExpressionSearchScript(expr, bindings, specialValue, needsScores); } + private TermsSetQueryScript.LeafFactory newTermsSetQueryScript(Expression expr, SearchLookup lookup, + @Nullable Map vars) { + // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, + // instead of complicating SimpleBindings (which should stay simple) + SimpleBindings bindings = new SimpleBindings(); + for (String variable : expr.variables) { + try { + if (vars != null && vars.containsKey(variable)) { + bindFromParams(vars, bindings, variable); + } else { + // delegate valuesource creation based on field's type + // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods. + final ValueSource valueSource = getDocValueSource(variable, lookup); + bindings.add(variable, valueSource.asDoubleValuesSource()); + } + } catch (Exception e) { + // we defer "binding" of variables until here: give context for that variable + throw convertToScriptException("link error", expr.sourceText, variable, e); + } + } + ReplaceableConstDoubleValueSource specialValue = null; + return new ExpressionTermSetQueryScript(expr, bindings, specialValue); + } + /** * This is a hack for filter scripts, which must return booleans instead of doubles as expression do. * See https://github.com/elastic/elasticsearch/issues/26429. 
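With this context wired up, an expression can drive the `minimum_should_match` of a `terms_set` query. A minimal sketch of such a query built with the Java client follows; the field names and values are hypothetical, and it assumes the expression engine is available on the cluster:

["source","java"]
--------------------------------------------------
import java.util.Arrays;
import java.util.Collections;

import org.elasticsearch.index.query.TermsSetQueryBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

public class TermsSetExpressionSketch {
    public static TermsSetQueryBuilder codesQuery() {
        // Per-document number of required matches, computed by an expression script.
        Script minimumShouldMatch = new Script(ScriptType.INLINE, "expression",
                "doc['required_matches'].value", Collections.emptyMap());
        // Match documents whose "codes" field contains at least that many of the given terms.
        return new TermsSetQueryBuilder("codes", Arrays.asList("abc", "def", "ghi"))
                .setMinimumShouldMatchScript(minimumShouldMatch);
    }
}
--------------------------------------------------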
@@ -362,4 +299,106 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
         stack.add(pointer.toString());
         throw new ScriptException(message, cause, stack, source, NAME);
     }
+
+    private static ValueSource getDocValueSource(String variable, SearchLookup lookup) throws ParseException {
+        VariableContext[] parts = VariableContext.parse(variable);
+        if (parts[0].text.equals("doc") == false) {
+            throw new ParseException("Unknown variable [" + parts[0].text + "]", 0);
+        }
+        if (parts.length < 2 || parts[1].type != VariableContext.Type.STR_INDEX) {
+            throw new ParseException("Variable 'doc' must be used with a specific field like: doc['myfield']", 3);
+        }
+
+        // .value is the default for doc['field']; it's optional.
+        String variablename = "value";
+        String methodname = null;
+        if (parts.length == 3) {
+            if (parts[2].type == VariableContext.Type.METHOD) {
+                methodname = parts[2].text;
+            } else if (parts[2].type == VariableContext.Type.MEMBER) {
+                variablename = parts[2].text;
+            } else {
+                throw new IllegalArgumentException(
+                    "Only member variables or member methods may be accessed on a field when not accessing the field directly"
+                );
+            }
+        }
+        // true if the variable is of type doc['field'].date.xxx
+        boolean dateAccessor = false;
+        if (parts.length > 3) {
+            // access to the .date "object" within the field
+            if (parts.length == 4 && ("date".equals(parts[2].text) || "getDate".equals(parts[2].text))) {
+                if (parts[3].type == VariableContext.Type.METHOD) {
+                    methodname = parts[3].text;
+                    dateAccessor = true;
+                } else if (parts[3].type == VariableContext.Type.MEMBER) {
+                    variablename = parts[3].text;
+                    dateAccessor = true;
+                }
+            }
+            if (!dateAccessor) {
+                throw new IllegalArgumentException(
+                    "Variable [" + variable + "] does not follow an allowed format of either doc['field'] or doc['field'].method()"
+                );
+            }
+        }
+
+        String fieldname = parts[1].text;
+        MappedFieldType fieldType = lookup.doc().mapperService().fullName(fieldname);
+
+        if (fieldType == null) {
+            throw new ParseException("Field [" + fieldname + "] does not exist in mappings", 5);
+        }
+
+        IndexFieldData<?> fieldData = lookup.doc().getForField(fieldType);
+        final ValueSource valueSource;
+        if (fieldType instanceof GeoPointFieldType) {
+            // geo
+            if (methodname == null) {
+                valueSource = GeoField.getVariable(fieldData, fieldname, variablename);
+            } else {
+                valueSource = GeoField.getMethod(fieldData, fieldname, methodname);
+            }
+        } else if (fieldType instanceof DateFieldMapper.DateFieldType) {
+            if (dateAccessor) {
+                // date object
+                if (methodname == null) {
+                    valueSource = DateObject.getVariable(fieldData, fieldname, variablename);
+                } else {
+                    valueSource = DateObject.getMethod(fieldData, fieldname, methodname);
+                }
+            } else {
+                // date field itself
+                if (methodname == null) {
+                    valueSource = DateField.getVariable(fieldData, fieldname, variablename);
+                } else {
+                    valueSource = DateField.getMethod(fieldData, fieldname, methodname);
+                }
+            }
+        } else if (fieldData instanceof IndexNumericFieldData) {
+            // number
+            if (methodname == null) {
+                valueSource = NumericField.getVariable(fieldData, fieldname, variablename);
+            } else {
+                valueSource = NumericField.getMethod(fieldData, fieldname, methodname);
+            }
+        } else {
+            throw new ParseException("Field [" + fieldname + "] must be numeric, date, or geopoint", 5);
+        }
+        return valueSource;
+    }
+
+    // TODO: document and/or error if params contains _score?
+    // NOTE: by checking for the variable in params first, it allows masking document fields with a global constant,
+    // but if we were to reverse it, we could provide a way to supply dynamic defaults for documents missing the field?
+    private static void bindFromParams(@Nullable final Map<String, Object> params, final SimpleBindings bindings, final String variable) throws ParseException {
+        Object value = params.get(variable);
+        if (value instanceof Number) {
+            bindings.add(variable, new DoubleConstValueSource(((Number) value).doubleValue()).asDoubleValuesSource());
+        } else {
+            throw new ParseException("Parameter [" + variable + "] must be a numeric type", 0);
+        }
+    }
 }
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionTermSetQueryScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionTermSetQueryScript.java
new file mode 100644
index 00000000000..d2fd77713ea
--- /dev/null
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionTermSetQueryScript.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script.expression;
+
+import java.io.IOException;
+import org.apache.lucene.expressions.Bindings;
+import org.apache.lucene.expressions.Expression;
+import org.apache.lucene.expressions.SimpleBindings;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DoubleValues;
+import org.apache.lucene.search.DoubleValuesSource;
+import org.elasticsearch.script.GeneralScriptException;
+import org.elasticsearch.script.TermsSetQueryScript;
+
+/**
+ * A bridge to evaluate an {@link Expression} against {@link Bindings} in the context
+ * of a {@link TermsSetQueryScript}.
+ */
+class ExpressionTermSetQueryScript implements TermsSetQueryScript.LeafFactory {
+
+    final Expression exprScript;
+    final SimpleBindings bindings;
+    final DoubleValuesSource source;
+    final ReplaceableConstDoubleValueSource specialValue; // _value
+
+    ExpressionTermSetQueryScript(Expression e, SimpleBindings b, ReplaceableConstDoubleValueSource v) {
+        exprScript = e;
+        bindings = b;
+        source = exprScript.getDoubleValuesSource(bindings);
+        specialValue = v;
+    }
+
+    @Override
+    public TermsSetQueryScript newInstance(final LeafReaderContext leaf) throws IOException {
+        return new TermsSetQueryScript() {
+            // No scores are available in this context, so pass null for the scores source.
+ DoubleValues values = source.getValues(leaf, null); + + @Override + public Number execute() { + try { + return values.doubleValue(); + } catch (Exception exception) { + throw new GeneralScriptException("Error evaluating " + exprScript, exception); + } + } + + @Override + public void setDocument(int d) { + try { + values.advanceExact(d); + } catch (IOException e) { + throw new IllegalStateException("Can't advance to doc using " + exprScript, e); + } + } + }; + } + +} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java index 7cd918cf914..06875632134 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java @@ -29,12 +29,12 @@ import org.elasticsearch.search.MultiValueMode; final class NumericField { // no instance private NumericField() {} - + // supported variables static final String VALUE_VARIABLE = "value"; static final String EMPTY_VARIABLE = "empty"; static final String LENGTH_VARIABLE = "length"; - + // supported methods static final String GETVALUE_METHOD = "getValue"; static final String ISEMPTY_METHOD = "isEmpty"; @@ -45,7 +45,7 @@ final class NumericField { static final String MEDIAN_METHOD = "median"; static final String SUM_METHOD = "sum"; static final String COUNT_METHOD = "count"; - + static ValueSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { switch (variable) { case VALUE_VARIABLE: @@ -55,11 +55,11 @@ final class NumericField { case LENGTH_VARIABLE: return new CountMethodValueSource(fieldData); default: - throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for " + + throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for " + "numeric field [" + fieldName + "]."); } } - + static ValueSource getMethod(IndexFieldData fieldData, String fieldName, String method) { switch (method) { case GETVALUE_METHOD: diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java new file mode 100644 index 00000000000..c7eae2446a6 --- /dev/null +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script.expression; + +import java.io.IOException; +import java.text.ParseException; +import java.util.Collections; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.fielddata.AtomicNumericFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; +import org.elasticsearch.script.ScriptException; +import org.elasticsearch.script.TermsSetQueryScript; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.test.ESTestCase; + +import static org.mockito.Matchers.anyInt; +import static org.mockito.Matchers.anyObject; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ExpressionTermsSetQueryTests extends ESTestCase { + private ExpressionScriptEngine service; + private SearchLookup lookup; + + @Override + public void setUp() throws Exception { + super.setUp(); + + NumberFieldType fieldType = new NumberFieldType(NumberType.DOUBLE); + MapperService mapperService = mock(MapperService.class); + when(mapperService.fullName("field")).thenReturn(fieldType); + when(mapperService.fullName("alias")).thenReturn(fieldType); + + SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class); + when(doubleValues.advanceExact(anyInt())).thenReturn(true); + when(doubleValues.nextValue()).thenReturn(2.718); + + AtomicNumericFieldData atomicFieldData = mock(AtomicNumericFieldData.class); + when(atomicFieldData.getDoubleValues()).thenReturn(doubleValues); + + IndexNumericFieldData fieldData = mock(IndexNumericFieldData.class); + when(fieldData.getFieldName()).thenReturn("field"); + when(fieldData.load(anyObject())).thenReturn(atomicFieldData); + + service = new ExpressionScriptEngine(Settings.EMPTY); + lookup = new SearchLookup(mapperService, ignored -> fieldData, null); + } + + private TermsSetQueryScript.LeafFactory compile(String expression) { + TermsSetQueryScript.Factory factory = + service.compile(null, expression, TermsSetQueryScript.CONTEXT, Collections.emptyMap()); + return factory.newFactory(Collections.emptyMap(), lookup); + } + + public void testCompileError() { + ScriptException e = expectThrows(ScriptException.class, () -> { + compile("doc['field'].value * *@#)(@$*@#$ + 4"); + }); + assertTrue(e.getCause() instanceof ParseException); + } + + public void testLinkError() { + ScriptException e = expectThrows(ScriptException.class, () -> { + compile("doc['nonexistent'].value * 5"); + }); + assertTrue(e.getCause() instanceof ParseException); + } + + public void testFieldAccess() throws IOException { + TermsSetQueryScript script = compile("doc['field'].value").newInstance(null); + script.setDocument(1); + + double result = script.execute().doubleValue(); + assertEquals(2.718, result, 0.0); + } + + public void testFieldAccessWithFieldAlias() throws IOException { + TermsSetQueryScript script = compile("doc['alias'].value").newInstance(null); + script.setDocument(1); + + double result = script.execute().doubleValue(); + assertEquals(2.718, result, 0.0); + } +} diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java index 
b739fc1cfd0..ca28d12a7bd 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java @@ -23,10 +23,10 @@ import com.github.mustachejava.MustacheException; import com.github.mustachejava.MustacheFactory; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; @@ -51,7 +51,7 @@ import java.util.Map; * {@link Mustache} object can then be re-used for subsequent executions. */ public final class MustacheScriptEngine implements ScriptEngine { - private static final Logger logger = ESLoggerFactory.getLogger(MustacheScriptEngine.class); + private static final Logger logger = LogManager.getLogger(MustacheScriptEngine.class); public static final String NAME = "mustache"; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index e6ed475a7be..0fbdfa763ea 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -96,7 +96,7 @@ final class Compiler { if (found != null) { return found; } - found = painlessLookup.canonicalTypeNameToType(name.replace('$', '.')); + found = painlessLookup.javaClassNameToClass(name); return found != null ? found : super.findClass(name); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/StaticTest.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/StaticTest.java new file mode 100644 index 00000000000..4a4f27b8f21 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/StaticTest.java @@ -0,0 +1,26 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.painless;
+
+public class StaticTest {
+    public static int staticAddIntsTest(int x, int y) {
+        return x + y;
+    }
+}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java
index ed9e7668336..ce31db43eef 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java
@@ -34,22 +34,28 @@ import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToBoxe

 public final class PainlessLookup {

+    private final Map<String, Class<?>> javaClassNamesToClasses;
     private final Map<String, Class<?>> canonicalClassNamesToClasses;
     private final Map<Class<?>, PainlessClass> classesToPainlessClasses;

     private final Map<String, PainlessMethod> painlessMethodKeysToImportedPainlessMethods;
     private final Map<String, PainlessClassBinding> painlessMethodKeysToPainlessClassBindings;

-    PainlessLookup(Map<String, Class<?>> canonicalClassNamesToClasses, Map<Class<?>, PainlessClass> classesToPainlessClasses,
+    PainlessLookup(
+            Map<String, Class<?>> javaClassNamesToClasses,
+            Map<String, Class<?>> canonicalClassNamesToClasses,
+            Map<Class<?>, PainlessClass> classesToPainlessClasses,
             Map<String, PainlessMethod> painlessMethodKeysToImportedPainlessMethods,
             Map<String, PainlessClassBinding> painlessMethodKeysToPainlessClassBindings) {

+        Objects.requireNonNull(javaClassNamesToClasses);
         Objects.requireNonNull(canonicalClassNamesToClasses);
         Objects.requireNonNull(classesToPainlessClasses);

         Objects.requireNonNull(painlessMethodKeysToImportedPainlessMethods);
         Objects.requireNonNull(painlessMethodKeysToPainlessClassBindings);

+        this.javaClassNamesToClasses = javaClassNamesToClasses;
         this.canonicalClassNamesToClasses = Collections.unmodifiableMap(canonicalClassNamesToClasses);
         this.classesToPainlessClasses = Collections.unmodifiableMap(classesToPainlessClasses);

@@ -57,6 +63,10 @@ public final class PainlessLookup {
         this.painlessMethodKeysToPainlessClassBindings = Collections.unmodifiableMap(painlessMethodKeysToPainlessClassBindings);
     }

+    public Class<?> javaClassNameToClass(String javaClassName) {
+        return javaClassNamesToClasses.get(javaClassName);
+    }
+
     public boolean isValidCanonicalClassName(String canonicalClassName) {
         Objects.requireNonNull(canonicalClassName);

diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
index b3bc8580b38..552ad56f68a 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
@@ -120,6 +120,15 @@ public final class PainlessLookupBuilder {
         return painlessLookupBuilder.build();
     }

+    // javaClassNamesToClasses is all the classes that need to be available to the custom classloader
+    // including classes used as part of imported methods and class bindings but not necessarily whitelisted
+    // individually. The values of javaClassNamesToClasses are a superset of the values of
+    // canonicalClassNamesToClasses.
+    private final Map<String, Class<?>> javaClassNamesToClasses;
+    // canonicalClassNamesToClasses is all the whitelisted classes available in a Painless script including
+    // classes with imported canonical names but does not include classes from imported methods or class
+    // bindings unless also whitelisted separately. The values of canonicalClassNamesToClasses are a subset
+    // of the values of javaClassNamesToClasses.
     private final Map<String, Class<?>> canonicalClassNamesToClasses;
     private final Map<Class<?>, PainlessClassBuilder> classesToPainlessClassBuilders;

@@ -127,6 +136,7 @@ public final class PainlessLookupBuilder {
     private final Map<String, PainlessClassBinding> painlessMethodKeysToPainlessClassBindings;

     public PainlessLookupBuilder() {
+        javaClassNamesToClasses = new HashMap<>();
         canonicalClassNamesToClasses = new HashMap<>();
         classesToPainlessClassBuilders = new HashMap<>();

@@ -189,7 +199,16 @@ public final class PainlessLookupBuilder {
             throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]");
         }

-        Class<?> existingClass = canonicalClassNamesToClasses.get(canonicalClassName);
+        Class<?> existingClass = javaClassNamesToClasses.get(clazz.getName());
+
+        if (existingClass == null) {
+            javaClassNamesToClasses.put(clazz.getName(), clazz);
+        } else if (existingClass != clazz) {
+            throw new IllegalArgumentException("class [" + canonicalClassName + "] " +
+                    "cannot represent multiple java classes with the same name from different class loaders");
+        }
+
+        existingClass = canonicalClassNamesToClasses.get(canonicalClassName);

         if (existingClass != null && existingClass != clazz) {
             throw new IllegalArgumentException("class [" + canonicalClassName + "] " +
@@ -685,19 +704,20 @@ public final class PainlessLookupBuilder {
         }

         String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);
+        Class<?> existingTargetClass = javaClassNamesToClasses.get(targetClass.getName());
+
+        if (existingTargetClass == null) {
+            javaClassNamesToClasses.put(targetClass.getName(), targetClass);
+        } else if (existingTargetClass != targetClass) {
+            throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] " +
+                    "cannot represent multiple java classes with the same name from different class loaders");
+        }

         if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) {
             throw new IllegalArgumentException(
                     "invalid imported method name [" + methodName + "] for target class [" + targetCanonicalClassName + "].");
         }

-        PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);
-
-        if (painlessClassBuilder == null) {
-            throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for imported method " +
-                    "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
-        }
-
         int typeParametersSize = typeParameters.size();
         List<Class<?>> javaTypeParameters = new ArrayList<>(typeParametersSize);

@@ -825,6 +845,14 @@ public final class PainlessLookupBuilder {
         }

         String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);
+        Class<?> existingTargetClass = javaClassNamesToClasses.get(targetClass.getName());
+
+        if (existingTargetClass == null) {
+            javaClassNamesToClasses.put(targetClass.getName(), targetClass);
+        } else if (existingTargetClass != targetClass) {
+            throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] " +
+                    "cannot represent multiple java classes with the same name from different class loaders");
+        }

         Constructor<?>[] javaConstructors = targetClass.getConstructors();
         Constructor<?> javaConstructor = null;

@@ -959,7 +987,23 @@ public final class PainlessLookupBuilder {
             classesToPainlessClasses.put(painlessClassBuilderEntry.getKey(), painlessClassBuilderEntry.getValue().build());
         }

-        return new PainlessLookup(canonicalClassNamesToClasses, classesToPainlessClasses,
+        if
(javaClassNamesToClasses.values().containsAll(canonicalClassNamesToClasses.values()) == false) { + throw new IllegalArgumentException("the values of java class names to classes " + + "must be a superset of the values of canonical class names to classes"); + } + + if (javaClassNamesToClasses.values().containsAll(classesToPainlessClasses.keySet()) == false) { + throw new IllegalArgumentException("the values of java class names to classes " + + "must be a superset of the keys of classes to painless classes"); + } + + if (canonicalClassNamesToClasses.values().containsAll(classesToPainlessClasses.keySet()) == false || + classesToPainlessClasses.keySet().containsAll(canonicalClassNamesToClasses.values()) == false) { + throw new IllegalArgumentException("the values of canonical class names to classes " + + "must have the same classes as the keys of classes to painless classes"); + } + + return new PainlessLookup(javaClassNamesToClasses, canonicalClassNamesToClasses, classesToPainlessClasses, painlessMethodKeysToImportedPainlessMethods, painlessMethodKeysToPainlessClassBindings); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java index b8a1af073bf..8ba8b79b74a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java @@ -143,7 +143,7 @@ * described by later documentation. *

* Storebable nodes have three methods for writing -- setup, load, and store. These methods - * are used in conjuction with a parent node aware of the storeable node (lhs) that has a node + * are used in conjunction with a parent node aware of the storeable node (lhs) that has a node * representing a value to store (rhs). The setup method is always once called before a store * to give storeable nodes a chance to write any prefixes they may have and any values such as * array indices before the store happens. Load is called on a storeable node that must also @@ -152,7 +152,7 @@ * Sub nodes are partial nodes that require a parent to work correctly. These nodes can really * represent anything the parent node would like to split up into logical pieces and don't really * have any distinct set of rules. The currently existing subnodes all have ANode as a super class - * somewhere in their class heirachy so the parent node can defer some analysis and writing to + * somewhere in their class hierarchy so the parent node can defer some analysis and writing to * the sub node. */ package org.elasticsearch.painless.node; diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 7ac13c03876..9bb7ea94826 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -261,6 +261,7 @@ class org.elasticsearch.painless.FeatureTest no_import { # for testing static_import { + int staticAddIntsTest(int, int) from_class org.elasticsearch.painless.StaticTest float staticAddFloatsTest(float, float) from_class org.elasticsearch.painless.FeatureTest int testAddWithState(int, int, int, double) bound_to org.elasticsearch.painless.BindingTest } \ No newline at end of file diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java index c5cc723ca84..6a775825117 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java @@ -136,6 +136,7 @@ public class BasicAPITests extends ScriptTestCase { } public void testStatic() { + assertEquals(10, exec("staticAddIntsTest(7, 3)")); assertEquals(15.5f, exec("staticAddFloatsTest(6.5f, 9.0f)")); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index 1460d5f2359..ac76a8c0408 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -20,8 +20,8 @@ package org.elasticsearch.painless; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessConstructor; @@ -55,7 +55,7 @@ import static java.util.Comparator.comparing; public class PainlessDocGenerator { private static final PainlessLookup 
PAINLESS_LOOKUP = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS); - private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class); + private static final Logger logger = LogManager.getLogger(PainlessDocGenerator.class); private static final Comparator FIELD_NAME = comparing(f -> f.javaField.getName()); private static final Comparator METHOD_NAME = comparing(m -> m.javaMethod.getName()); private static final Comparator METHOD_NUMBER_OF_PARAMS = comparing(m -> m.typeParameters.size()); @@ -434,7 +434,7 @@ public class PainlessDocGenerator { if (classPackage.startsWith("org.apache.lucene")) { return "lucene-core"; } - throw new IllegalArgumentException("Unrecognized packge: " + classPackage); + throw new IllegalArgumentException("Unrecognized package: " + classPackage); } private static void emitGeneratedWarning(PrintStream stream) { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java index 44cd6b5304d..e70d728091f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java @@ -83,7 +83,7 @@ public class PainlessExecuteRequestTests extends AbstractStreamableXContentTestC QueryBuilder query = randomBoolean() ? new MatchAllQueryBuilder() : null; // TODO: pass down XContextType to createTestInstance() method. // otherwise the document itself is different causing test failures. - // This should be done in a seperate change as the test instance is created before xcontent type is randomly picked and + // This should be done in a separate change as the test instance is created before xcontent type is randomly picked and // all the createTestInstance() methods need to be changed, which will make this a big chnage // BytesReference doc = randomBoolean() ? new BytesArray("{}") : null; BytesReference doc = null; diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java index 6efff154b62..991d97b6196 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java @@ -42,8 +42,8 @@ import java.util.stream.Collectors; /** * Returns the results for a {@link RankEvalRequest}.
- * The repsonse contains a detailed section for each evaluation query in the request and
- * possible failures that happened when executin individual queries.
+ * The response contains a detailed section for each evaluation query in the request and
+ * possible failures that happened when executing individual queries.
 **/
 public class RankEvalResponse extends ActionResponse implements ToXContentObject {

diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java
index 5c12e85bb4c..7efd1ee5d6e 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java
@@ -481,7 +481,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {

     /**
      * Execute a bulk retry test case. The total number of failures is random and the number of retries attempted is set to
-     * testRequest.getMaxRetries and controled by the failWithRejection parameter.
+     * testRequest.getMaxRetries and controlled by the failWithRejection parameter.
      */
     private void bulkRetryTestCase(boolean failWithRejection) throws Exception {
         int totalFailures = randomIntBetween(1, testRequest.getMaxRetries());
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java
index d2110c5cded..6b7b21a5514 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java
@@ -122,7 +122,7 @@ public class CancelTests extends ReindexTestCase {
         logger.debug("waiting for updates to be blocked");
         boolean blocked = awaitBusy(
             () -> ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0,
-            1, TimeUnit.MINUTES); // 10 seconds is usually fine but on heavilly loaded machines this can wake a while
+            1, TimeUnit.MINUTES); // 10 seconds is usually fine but on heavily loaded machines this can take a while

         assertTrue("updates blocked", blocked);

         // Status should show the task running
diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle
index 05e3b1807f0..210b440ebb9 100644
--- a/modules/transport-netty4/build.gradle
+++ b/modules/transport-netty4/build.gradle
@@ -34,13 +34,13 @@ compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-tr

 dependencies {
   // network stack
-  compile "io.netty:netty-buffer:4.1.29.Final"
-  compile "io.netty:netty-codec:4.1.29.Final"
-  compile "io.netty:netty-codec-http:4.1.29.Final"
-  compile "io.netty:netty-common:4.1.29.Final"
-  compile "io.netty:netty-handler:4.1.29.Final"
-  compile "io.netty:netty-resolver:4.1.29.Final"
-  compile "io.netty:netty-transport:4.1.29.Final"
+  compile "io.netty:netty-buffer:${versions.netty}"
+  compile "io.netty:netty-codec:${versions.netty}"
+  compile "io.netty:netty-codec-http:${versions.netty}"
+  compile "io.netty:netty-common:${versions.netty}"
+  compile "io.netty:netty-handler:${versions.netty}"
+  compile "io.netty:netty-resolver:${versions.netty}"
+  compile "io.netty:netty-transport:${versions.netty}"
 }

 dependencyLicenses {
@@ -111,6 +111,7 @@ thirdPartyAudit.excludes = [
   // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional
   'org.slf4j.Logger',
'org.slf4j.LoggerFactory', + 'org.slf4j.spi.LocationAwareLogger', 'com.google.protobuf.ExtensionRegistryLite', 'com.google.protobuf.MessageLiteOrBuilder', @@ -145,6 +146,7 @@ thirdPartyAudit.excludes = [ 'io.netty.util.internal.PlatformDependent0$1', 'io.netty.util.internal.PlatformDependent0$2', 'io.netty.util.internal.PlatformDependent0$3', + 'io.netty.util.internal.PlatformDependent0$5', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields', diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.29.Final.jar.sha1 deleted file mode 100644 index 17798a82aa7..00000000000 --- a/modules/transport-netty4/licenses/netty-buffer-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c3809f72e4b535b343b7dfa3c0c8210dad2fa5ea \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.30.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..070bb3f8332 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +597adb653306470fb3ec1af3c0f3f30a37b1310a \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.29.Final.jar.sha1 deleted file mode 100644 index f892420795b..00000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1651bc2e279216773c234cafe402d68d2a5adc90 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.30.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..cd0786f3e9f --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +515c8f609aaca28a94f984d89a9667dd3359c1b1 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.29.Final.jar.sha1 deleted file mode 100644 index aa97345bad1..00000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -454688b88cea27a4d407202d1fc79a6522345b5e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.30.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..e795cce1ba0 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +1384c630e8a0eeef33ad12a28791dce6e1d8767c \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.29.Final.jar.sha1 deleted file mode 100644 index 47140876e6a..00000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a5d6a735ed07d8f197daa48db7f097cfc971ee5e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.30.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..079e35ecc4c --- /dev/null 
+++ b/modules/transport-netty4/licenses/netty-common-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +5dca0c34d8f38af51a2398614e81888f51cf811a \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.29.Final.jar.sha1 deleted file mode 100644 index 7c2d407f75e..00000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1acf1d94799296a2517533ec75ce7e155e9c4ea7 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.30.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..50b87cdac13 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +ecc076332ed103411347f4806a44ee32d9d9cb5f \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.29.Final.jar.sha1 deleted file mode 100644 index bac08f57079..00000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bbec1dc913732e4773893c14d795b15d6c1e878e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.30.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..2ef4d895143 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +5106fd687066ffd712e5295d32af4e2ac6482613 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.29.Final.jar.sha1 deleted file mode 100644 index 0ce64132afb..00000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c190b90f70e2ae8a48c068afad709e8728fcaa39 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.30.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..e4febd661cb --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +3d27bb432a3b125167ac161b26415ad29ec17f02 \ No newline at end of file diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java index 4989cd35f7b..16513c57bb3 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java @@ -49,19 +49,6 @@ public final class Netty4CorsConfigBuilder { return new Netty4CorsConfigBuilder(); } - /** - * Creates a {@link Netty4CorsConfigBuilder} instance with the specified origin. - * - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - public static Netty4CorsConfigBuilder forOrigin(final String origin) { - if ("*".equals(origin)) { - return new Netty4CorsConfigBuilder(); - } - return new Netty4CorsConfigBuilder(origin); - } - - /** * Create a {@link Netty4CorsConfigBuilder} instance with the specified pattern origin. 
* @@ -94,7 +81,6 @@ public final class Netty4CorsConfigBuilder { final Set requestMethods = new HashSet<>(); final Set requestHeaders = new HashSet<>(); final Map> preflightHeaders = new HashMap<>(); - private boolean noPreflightHeaders; boolean shortCircuit; /** @@ -130,18 +116,6 @@ public final class Netty4CorsConfigBuilder { anyOrigin = false; } - /** - * Web browsers may set the 'Origin' request header to 'null' if a resource is loaded - * from the local file system. Calling this method will enable a successful CORS response - * with a wildcard for the CORS response header 'Access-Control-Allow-Origin'. - * - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - Netty4CorsConfigBuilder allowNullOrigin() { - allowNullOrigin = true; - return this; - } - /** * Disables CORS support. * @@ -219,71 +193,6 @@ public final class Netty4CorsConfigBuilder { return this; } - /** - * Returns HTTP response headers that should be added to a CORS preflight response. - * - * An intermediary like a load balancer might require that a CORS preflight request - * have certain headers set. This enables such headers to be added. - * - * @param name the name of the HTTP header. - * @param values the values for the HTTP header. - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - public Netty4CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Object... values) { - if (values.length == 1) { - preflightHeaders.put(name, new ConstantValueGenerator(values[0])); - } else { - preflightResponseHeader(name, Arrays.asList(values)); - } - return this; - } - - /** - * Returns HTTP response headers that should be added to a CORS preflight response. - * - * An intermediary like a load balancer might require that a CORS preflight request - * have certain headers set. This enables such headers to be added. - * - * @param name the name of the HTTP header. - * @param value the values for the HTTP header. - * @param the type of values that the Iterable contains. - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - public Netty4CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Iterable value) { - preflightHeaders.put(name, new ConstantValueGenerator(value)); - return this; - } - - /** - * Returns HTTP response headers that should be added to a CORS preflight response. - * - * An intermediary like a load balancer might require that a CORS preflight request - * have certain headers set. This enables such headers to be added. - * - * Some values must be dynamically created when the HTTP response is created, for - * example the 'Date' response header. This can be accomplished by using a Callable - * which will have its 'call' method invoked when the HTTP response is created. - * - * @param name the name of the HTTP header. - * @param valueGenerator a Callable which will be invoked at HTTP response creation. - * @param the type of the value that the Callable can return. - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - public Netty4CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Callable valueGenerator) { - preflightHeaders.put(name, valueGenerator); - return this; - } - - /** - * Specifies that no preflight response headers should be added to a preflight response. - * - * @return {@link Netty4CorsConfigBuilder} to support method chaining. 
- */ - public Netty4CorsConfigBuilder noPreflightResponseHeaders() { - noPreflightHeaders = true; - return this; - } - /** * Specifies that a CORS request should be rejected if it's invalid before being * further processing. @@ -305,7 +214,7 @@ public final class Netty4CorsConfigBuilder { * @return {@link Netty4CorsConfig} the configured CorsConfig instance. */ public Netty4CorsConfig build() { - if (preflightHeaders.isEmpty() && !noPreflightHeaders) { + if (preflightHeaders.isEmpty()) { preflightHeaders.put("date", DateValueGenerator.INSTANCE); preflightHeaders.put("content-length", new ConstantValueGenerator("0")); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 76d7864c716..ad216f8ff2c 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -22,8 +22,6 @@ package org.elasticsearch.transport.netty4; import io.netty.buffer.ByteBuf; import io.netty.buffer.CompositeByteBuf; import io.netty.buffer.Unpooled; -import io.netty.channel.Channel; -import io.netty.channel.ChannelFuture; import io.netty.util.NettyRuntime; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; @@ -34,7 +32,6 @@ import org.elasticsearch.common.bytes.BytesReference; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.concurrent.atomic.AtomicBoolean; @@ -133,27 +130,4 @@ public class Netty4Utils { return new ByteBufBytesReference(buffer, size); } - public static void closeChannels(final Collection channels) throws IOException { - IOException closingExceptions = null; - final List futures = new ArrayList<>(); - for (final Channel channel : channels) { - try { - if (channel != null && channel.isOpen()) { - futures.add(channel.close()); - } - } catch (Exception e) { - if (closingExceptions == null) { - closingExceptions = new IOException("failed to close channels"); - } - closingExceptions.addSuppressed(e); - } - } - for (final ChannelFuture future : futures) { - future.awaitUninterruptibly(); - } - - if (closingExceptions != null) { - throw closingExceptions; - } - } } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java index ce8a635ffb6..3bcefe0cf56 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java @@ -60,7 +60,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { String index = "foo"; String type = "mytype"; - String[] equilavent = {"I WİLL USE TURKİSH CASING", "ı will use turkish casıng"}; + String[] equivalent = {"I WİLL USE TURKİSH CASING", "ı will use turkish casıng"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -75,8 +75,8 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { // both values should collate to same value indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", 
XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); // searching for either of the terms should return both results since they collate to the same value @@ -85,7 +85,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) .sort("collate") .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); @@ -100,7 +100,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { String index = "foo"; String type = "mytype"; - String[] equilavent = {"a", "C", "a", "B"}; + String[] equivalent = {"a", "C", "a", "B"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -114,9 +114,9 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { // everything should be indexed fine, no exceptions indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":[\"" + equilavent[0] + "\", \"" - + equilavent[1] + "\"]}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[2] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":[\"" + equivalent[0] + "\", \"" + + equivalent[1] + "\"]}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[2] + "\"}", XContentType.JSON) ); // using sort mode = max, values B and C will be used for the sort @@ -161,7 +161,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { String index = "foo"; String type = "mytype"; - String[] equilavent = {"I W\u0049\u0307LL USE TURKİSH CASING", "ı will use turkish casıng"}; + String[] equivalent = {"I W\u0049\u0307LL USE TURKİSH CASING", "ı will use turkish casıng"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -176,8 +176,8 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); // searching for either of the terms should return both results since they collate to the same value @@ -186,7 +186,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? 
equivalent[0] : equivalent[1])) .sort("collate") .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); @@ -204,7 +204,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { String index = "foo"; String type = "mytype"; - String[] equilavent = {"TESTING", "testing"}; + String[] equivalent = {"TESTING", "testing"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -219,8 +219,8 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest() @@ -228,7 +228,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) .sort("collate") .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); @@ -247,7 +247,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { String index = "foo"; String type = "mytype"; - String[] equilavent = {"foo-bar", "foo bar"}; + String[] equivalent = {"foo-bar", "foo bar"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -262,8 +262,8 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest() @@ -271,7 +271,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? 
equivalent[0] : equivalent[1])) .sort("collate") .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); @@ -467,7 +467,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { RuleBasedCollator tailoredCollator = new RuleBasedCollator(baseCollator.getRules() + DIN5007_2_tailorings); String tailoredRules = tailoredCollator.getRules(); - String[] equilavent = {"Töne", "Toene"}; + String[] equivalent = {"Töne", "Toene"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -481,8 +481,8 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest() @@ -490,7 +490,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) .sort("collate", SortOrder.ASC) .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); diff --git a/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/20_suggest.yml b/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/20_suggest.yml index 3731a8b2112..bac4e1014ef 100644 --- a/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/20_suggest.yml +++ b/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/20_suggest.yml @@ -1,7 +1,7 @@ # tests that the custom suggester works # the issue that prompted serializing Suggestion as a registered named writeable was not revealed until -# a user found that it would fail when reducing suggestions in a multi node envrionment +# a user found that it would fail when reducing suggestions in a multi node environment # https://github.com/elastic/elasticsearch/issues/26585 "test custom suggester": - do: diff --git a/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleStaticMethodClass.java b/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleStaticMethodClass.java new file mode 100644 index 00000000000..72db9c4979d --- /dev/null +++ b/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleStaticMethodClass.java @@ -0,0 +1,26 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.example.painlesswhitelist; + +public class ExampleStaticMethodClass { + public static int exampleAddInts(int x, int y) { + return x + y; + } +} diff --git a/plugins/examples/painless-whitelist/src/main/resources/org/elasticsearch/example/painlesswhitelist/example_whitelist.txt b/plugins/examples/painless-whitelist/src/main/resources/org/elasticsearch/example/painlesswhitelist/example_whitelist.txt index 7908d354175..99e5521aebf 100644 --- a/plugins/examples/painless-whitelist/src/main/resources/org/elasticsearch/example/painlesswhitelist/example_whitelist.txt +++ b/plugins/examples/painless-whitelist/src/main/resources/org/elasticsearch/example/painlesswhitelist/example_whitelist.txt @@ -39,4 +39,8 @@ class java.lang.String { # existing classes can be "augmented" to have additional methods, which take the object # to operate on as the first argument to a static method int org.elasticsearch.example.painlesswhitelist.ExampleWhitelistedClass toInt() +} + +static_import { + int exampleAddInts(int, int) from_class org.elasticsearch.example.painlesswhitelist.ExampleStaticMethodClass } \ No newline at end of file diff --git a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/30_static.yml b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/30_static.yml new file mode 100644 index 00000000000..b6592637296 --- /dev/null +++ b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/30_static.yml @@ -0,0 +1,26 @@ +# Example test using whitelisted statically imported method + +"custom static imported method": +- do: + index: + index: test + type: test + id: 1 + body: { "num1": 1 } +- do: + indices.refresh: {} + +- do: + index: test + search: + body: + query: + match_all: {} + script_fields: + sNum1: + script: + source: "exampleAddInts(2, (int)doc['num1'][0])" + lang: painless + +- match: { hits.total: 1 } +- match: { hits.hits.0.fields.sNum1.0: 3 } diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index f55104f2a96..6ffda42899e 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -23,23 +23,13 @@ esplugin { } versions << [ - 'tika': '1.18', - 'pdfbox': '2.0.9', + 'tika': '1.19.1', + 'pdfbox': '2.0.12', 'bouncycastle': '1.59', - 'poi': '3.17', - 'mime4j': '0.8.1' + 'poi': '4.0.0', + 'mime4j': '0.8.2' ] -if (rootProject.ext.compilerJavaVersion.isJava11()) { - // disabled until https://github.com/elastic/elasticsearch/issues/31456 is fixed. 
- integTestRunner { - systemProperty 'tests.rest.blacklist', [ - 'ingest_attachment/20_attachment_processor/Test indexed chars are configurable', - 'ingest_attachment/20_attachment_processor/Test indexed chars are configurable per document' - ].join(',') - } -} - dependencies { // mandatory for tika compile "org.apache.tika:tika-core:${versions.tika}" @@ -58,7 +48,7 @@ dependencies { // Adobe PDF compile "org.apache.pdfbox:pdfbox:${versions.pdfbox}" compile "org.apache.pdfbox:fontbox:${versions.pdfbox}" - compile "org.apache.pdfbox:jempbox:1.8.13" + compile "org.apache.pdfbox:jempbox:1.8.16" compile "commons-logging:commons-logging:${versions.commonslogging}" compile "org.bouncycastle:bcmail-jdk15on:${versions.bouncycastle}" compile "org.bouncycastle:bcprov-jdk15on:${versions.bouncycastle}" @@ -68,12 +58,12 @@ dependencies { compile "org.apache.poi:poi:${versions.poi}" compile "org.apache.poi:poi-ooxml-schemas:${versions.poi}" compile "commons-codec:commons-codec:${versions.commonscodec}" - compile 'org.apache.xmlbeans:xmlbeans:2.6.0' + compile 'org.apache.xmlbeans:xmlbeans:3.0.1' compile 'org.apache.commons:commons-collections4:4.1' // MS Office compile "org.apache.poi:poi-scratchpad:${versions.poi}" // Apple iWork - compile 'org.apache.commons:commons-compress:1.16.1' + compile 'org.apache.commons:commons-compress:1.18' // Outlook documents compile "org.apache.james:apache-mime4j-core:${versions.mime4j}" compile "org.apache.james:apache-mime4j-dom:${versions.mime4j}" @@ -127,7 +117,13 @@ thirdPartyAudit.excludes = [ 'com.drew.metadata.iptc.IptcDirectory', 'com.drew.metadata.jpeg.JpegCommentDirectory', 'com.drew.metadata.jpeg.JpegDirectory', + 'com.epam.parso.Column', + 'com.epam.parso.DataWriterUtil', + 'com.epam.parso.SasFileProperties', + 'com.epam.parso.SasFileReader', + 'com.epam.parso.impl.SasFileReaderImpl', 'com.github.junrar.Archive', + 'com.github.junrar.impl.FileVolumeManager', 'com.github.junrar.rarfile.FileHeader', 'com.github.luben.zstd.ZstdInputStream', 'com.github.luben.zstd.ZstdOutputStream', @@ -158,27 +154,37 @@ thirdPartyAudit.excludes = [ 'com.graphbuilder.geom.PointFactory', 'com.healthmarketscience.jackcess.Column', 'com.healthmarketscience.jackcess.CryptCodecProvider', - 'com.healthmarketscience.jackcess.DataType', 'com.healthmarketscience.jackcess.Database', 'com.healthmarketscience.jackcess.DatabaseBuilder', - 'com.healthmarketscience.jackcess.PropertyMap$Property', + 'com.healthmarketscience.jackcess.DataType', + 'com.healthmarketscience.jackcess.impl.ByteUtil', + 'com.healthmarketscience.jackcess.impl.CustomToStringStyle', + 'com.healthmarketscience.jackcess.impl.PageChannel', 'com.healthmarketscience.jackcess.PropertyMap', - 'com.healthmarketscience.jackcess.Row', - 'com.healthmarketscience.jackcess.Table', + 'com.healthmarketscience.jackcess.PropertyMap$Property', 'com.healthmarketscience.jackcess.query.Query', + 'com.healthmarketscience.jackcess.Row', + 'com.healthmarketscience.jackcess.RuntimeIOException', + 'com.healthmarketscience.jackcess.Table', 'com.healthmarketscience.jackcess.util.LinkResolver', + 'com.healthmarketscience.jackcess.util.MemFileChannel', + 'com.healthmarketscience.jackcess.util.OleBlob', + 'com.healthmarketscience.jackcess.util.OleBlob$Builder', 'com.healthmarketscience.jackcess.util.OleBlob$CompoundContent', + 'com.healthmarketscience.jackcess.util.OleBlob$CompoundContent$Entry', 'com.healthmarketscience.jackcess.util.OleBlob$Content', 'com.healthmarketscience.jackcess.util.OleBlob$ContentType', + 
'com.healthmarketscience.jackcess.util.OleBlob$EmbeddedContent', 'com.healthmarketscience.jackcess.util.OleBlob$LinkContent', 'com.healthmarketscience.jackcess.util.OleBlob$OtherContent', + 'com.healthmarketscience.jackcess.util.OleBlob$PackageContent', 'com.healthmarketscience.jackcess.util.OleBlob$SimplePackageContent', - 'com.healthmarketscience.jackcess.util.OleBlob', 'com.healthmarketscience.jackcess.util.TableIterableBuilder', 'com.jmatio.io.MatFileHeader', 'com.jmatio.io.MatFileReader', 'com.jmatio.types.MLArray', 'com.jmatio.types.MLStructure', + 'com.microsoft.schemas.compatibility.impl.AlternateContentDocumentImpl$AlternateContentImpl$1ChoiceList', 'com.microsoft.schemas.office.excel.STCF', 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1Accel2List', 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AccelList', @@ -384,7 +390,6 @@ thirdPartyAudit.excludes = [ 'com.microsoft.schemas.office.word.CTBorder', 'com.microsoft.schemas.office.word.CTWrap', 'com.microsoft.schemas.office.x2006.digsig.STPositiveInteger', - 'com.microsoft.schemas.office.x2006.digsig.STSignatureComments', 'com.microsoft.schemas.office.x2006.digsig.STSignatureProviderUrl', 'com.microsoft.schemas.office.x2006.digsig.STSignatureText', 'com.microsoft.schemas.office.x2006.digsig.STSignatureType', @@ -530,7 +535,6 @@ thirdPartyAudit.excludes = [ 'javax.servlet.ServletContextListener', 'javax.ws.rs.core.Response', 'javax.ws.rs.core.UriBuilder', - 'junit.framework.TestCase', 'opennlp.tools.namefind.NameFinderME', 'opennlp.tools.namefind.TokenNameFinderModel', 'opennlp.tools.sentiment.SentimentME', @@ -545,8 +549,14 @@ thirdPartyAudit.excludes = [ 'org.apache.commons.exec.ExecuteWatchdog', 'org.apache.commons.exec.PumpStreamHandler', 'org.apache.commons.exec.environment.EnvironmentUtils', + 'org.apache.commons.lang.builder.ToStringBuilder', + 'org.apache.commons.lang.NotImplementedException', 'org.apache.commons.lang.StringUtils', 'org.apache.commons.lang.SystemUtils', + 'org.apache.commons.math3.linear.Array2DRowRealMatrix', + 'org.apache.commons.math3.linear.LUDecomposition', + 'org.apache.commons.math3.linear.MatrixUtils', + 'org.apache.commons.math3.linear.RealMatrix', 'org.apache.ctakes.typesystem.type.refsem.UmlsConcept', 'org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation', 'org.apache.cxf.jaxrs.client.WebClient', @@ -563,10 +573,10 @@ thirdPartyAudit.excludes = [ 'org.apache.http.entity.ByteArrayEntity', 'org.apache.http.impl.client.DefaultHttpClient', 'org.apache.jcp.xml.dsig.internal.dom.ApacheNodeSetData', - 'org.apache.jcp.xml.dsig.internal.dom.DOMDigestMethod', 'org.apache.jcp.xml.dsig.internal.dom.DOMKeyInfo', 'org.apache.jcp.xml.dsig.internal.dom.DOMReference', 'org.apache.jcp.xml.dsig.internal.dom.DOMSignedInfo', + 'org.apache.jcp.xml.dsig.internal.dom.DOMSubTreeData', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', 'org.apache.pdfbox.tools.imageio.ImageIOUtil', @@ -606,10 +616,9 @@ thirdPartyAudit.excludes = [ 'org.apache.xml.security.Init', 'org.apache.xml.security.c14n.Canonicalizer', 'org.apache.xml.security.signature.XMLSignatureInput', - 'org.apache.xml.security.utils.Base64', + 'org.apache.xml.security.utils.XMLUtils', 'org.brotli.dec.BrotliInputStream', - 'org.etsi.uri.x01903.v13.AnyType', - 'org.etsi.uri.x01903.v13.ClaimedRolesListType', + 'org.etsi.uri.x01903.v13.CertifiedRolesListType', 'org.etsi.uri.x01903.v13.CounterSignatureType', 'org.etsi.uri.x01903.v13.DataObjectFormatType$Factory', 'org.etsi.uri.x01903.v13.DataObjectFormatType', @@ -623,8 +632,8 @@ 
thirdPartyAudit.excludes = [ 'org.etsi.uri.x01903.v13.SignaturePolicyIdType', 'org.etsi.uri.x01903.v13.SignatureProductionPlaceType', 'org.etsi.uri.x01903.v13.SignedDataObjectPropertiesType', - 'org.etsi.uri.x01903.v13.SignerRoleType', 'org.etsi.uri.x01903.v13.UnsignedDataObjectPropertiesType', + 'org.etsi.uri.x01903.v13.impl.ClaimedRolesListTypeImpl$1ClaimedRoleList', 'org.etsi.uri.x01903.v13.impl.CRLRefsTypeImpl$1CRLRefList', 'org.etsi.uri.x01903.v13.impl.CRLValuesTypeImpl$1EncapsulatedCRLValueList', 'org.etsi.uri.x01903.v13.impl.CertIDListTypeImpl$1CertList', @@ -654,10 +663,6 @@ thirdPartyAudit.excludes = [ 'org.json.simple.JSONArray', 'org.json.simple.JSONObject', 'org.json.simple.parser.JSONParser', - 'org.junit.Test', - 'org.junit.internal.TextListener', - 'org.junit.runner.JUnitCore', - 'org.junit.runner.Result', 'org.objectweb.asm.AnnotationVisitor', 'org.objectweb.asm.Attribute', 'org.objectweb.asm.ClassReader', @@ -696,40 +701,32 @@ thirdPartyAudit.excludes = [ // http://poi.apache.org/faq.html#faq-N10025 'org.openxmlformats.schemas.drawingml.x2006.chart.CTArea3DChart', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAreaChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAxisUnit', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBar3DChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBarChart', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBubbleChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTChartLines', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDLbls', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDPt', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDTable', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispBlanksAs', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispUnits', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDLbl', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDLblPos', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDoughnutChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDPt', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDTable', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTErrBars', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExtensionList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExternalData', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExtension', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTFirstSliceAng', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTGrouping', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblAlgn', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblOffset', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLegendEntry', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLine3DChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMarkerSize', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMultiLvlStrRef', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTOfPieChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTOverlap', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPictureOptions', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPie3DChart', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotFmts', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotSource', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTProtection', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRadarChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRelId', - 
'org.openxmlformats.schemas.drawingml.x2006.chart.CTSerAx', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTShape', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSkip', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTStockChart', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTStyle', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface3DChart', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurfaceChart', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTextLanguageID', @@ -737,14 +734,19 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTrendline', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTUpDownBars', 'org.openxmlformats.schemas.drawingml.x2006.chart.CTView3D', - 'org.openxmlformats.schemas.drawingml.x2006.chart.STPageSetupOrientation', + 'org.openxmlformats.schemas.drawingml.x2006.chart.STAxisUnit', + 'org.openxmlformats.schemas.drawingml.x2006.chart.STMarkerSize', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTBarChartImpl$1AxIdList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTBarChartImpl$1SerLinesList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTBarSerImpl$1DPtList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTBarSerImpl$1TrendlineList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTDLblsImpl$1DLblList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTExtensionListImpl$1ExtList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLegendImpl$1LegendEntryList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1AxIdList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1SerList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1DPtList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1TrendlineList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTNumDataImpl$1PtList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieChartImpl$1SerList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieSerImpl$1DPtList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Area3DChartList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1AreaChartList', @@ -766,35 +768,41 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Surface3DChartList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SurfaceChartList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ValAxList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTRadarChartImpl$1AxIdList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTRadarSerImpl$1DPtList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1AxIdList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1SerList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1DPtList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1ErrBarsList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1TrendlineList', 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTStrDataImpl$1PtList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.STPageSetupOrientation', 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaBiLevelEffect', 
'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaCeilingEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaFloorEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaInverseEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaModulateEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaOutsetEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaReplaceEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTAngle', 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioCD', 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioFile', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTBackdrop', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTBevel', 'org.openxmlformats.schemas.drawingml.x2006.main.CTBiLevelEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTBlendEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTBlurEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTCamera', 'org.openxmlformats.schemas.drawingml.x2006.main.CTCell3D', 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorChangeEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorReplaceEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorSchemeList', 'org.openxmlformats.schemas.drawingml.x2006.main.CTComplementTransform', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectionSite', 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectorLocking', 'org.openxmlformats.schemas.drawingml.x2006.main.CTCustomColorList', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTDashStopList', 'org.openxmlformats.schemas.drawingml.x2006.main.CTDuotoneEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTEffectContainer', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTEffectReference', 'org.openxmlformats.schemas.drawingml.x2006.main.CTEmbeddedWAVAudioFile', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTFillEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTFillOverlayEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTFlatText', 'org.openxmlformats.schemas.drawingml.x2006.main.CTGammaTransform', @@ -806,49 +814,27 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.drawingml.x2006.main.CTInnerShadowEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseGammaTransform', 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseTransform', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTLineJoinBevel', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTLineJoinMiterProperties', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTLightRig', 'org.openxmlformats.schemas.drawingml.x2006.main.CTLuminanceEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTObjectStyleDefaults', 'org.openxmlformats.schemas.drawingml.x2006.main.CTPath2DArcTo', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTPolarAdjustHandle', 'org.openxmlformats.schemas.drawingml.x2006.main.CTPositiveFixedAngle', 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetShadowEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetTextShape', 'org.openxmlformats.schemas.drawingml.x2006.main.CTQuickTimeFile', 'org.openxmlformats.schemas.drawingml.x2006.main.CTReflectionEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTScene3D', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTShape3D', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTRelativeOffsetEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTShapeLocking', 
'org.openxmlformats.schemas.drawingml.x2006.main.CTSoftEdgesEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTSupplementalFont', 'org.openxmlformats.schemas.drawingml.x2006.main.CTTableBackgroundStyle', 'org.openxmlformats.schemas.drawingml.x2006.main.CTTableCellBorderStyle', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTableStyleTextStyle', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBlipBullet', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletColorFollowText', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletSizeFollowText', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletTypefaceFollowText', 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillFollowText', 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillGroupWrapper', 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineLineFollowText', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTileInfoProperties', 'org.openxmlformats.schemas.drawingml.x2006.main.CTTintEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTransformEffect', 'org.openxmlformats.schemas.drawingml.x2006.main.CTVideoFile', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTXYAdjustHandle', - 'org.openxmlformats.schemas.drawingml.x2006.main.STBlackWhiteMode', - 'org.openxmlformats.schemas.drawingml.x2006.main.STBlipCompression', - 'org.openxmlformats.schemas.drawingml.x2006.main.STFixedAngle', - 'org.openxmlformats.schemas.drawingml.x2006.main.STOnOffStyleType$Enum', - 'org.openxmlformats.schemas.drawingml.x2006.main.STPanose', - 'org.openxmlformats.schemas.drawingml.x2006.main.STPathFillMode', - 'org.openxmlformats.schemas.drawingml.x2006.main.STPresetPatternVal$Enum', - 'org.openxmlformats.schemas.drawingml.x2006.main.STPresetPatternVal', - 'org.openxmlformats.schemas.drawingml.x2006.main.STRectAlignment', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTextColumnCount', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTextNonNegativePoint', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTextTabAlignType', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTileFlipMode', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhPolarList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhXYList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1BlipFillList', @@ -875,6 +861,37 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1LumList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1TintList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTConnectionSiteListImpl$1CxnList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTDashStopListImpl$1DsList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaBiLevelList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaCeilingList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaFloorList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaInvList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaModFixList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaOutsetList', + 
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaReplList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1BiLevelList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1BlendList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1BlurList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1ClrChangeList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1ClrReplList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1ContList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1DuotoneList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1EffectList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1FillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1FillOverlayList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1GlowList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1GraysclList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1HslList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1InnerShdwList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1LumList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1OuterShdwList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1PrstShdwList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1ReflectionList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1RelOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1SoftEdgeList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1TintList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1XfrmList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectStyleListImpl$1EffectStyleList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1BlipFillList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GradFillList', @@ -951,62 +968,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatOffList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1ShadeList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1TintList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1CompList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GammaList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GrayList', - 
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvGammaList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1ShadeList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1TintList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1AlphaList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1AlphaModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1AlphaOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1BlueList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1BlueModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1BlueOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1CompList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GammaList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GrayList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GreenList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GreenModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GreenOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1HueList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1HueModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1HueOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1InvGammaList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1InvList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1LumList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1LumModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1LumOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1RedList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1RedModList', - 
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1RedOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1SatList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1SatModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1SatOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1ShadeList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1TintList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaModList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaOffList', @@ -1035,6 +996,62 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatOffList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1ShadeList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1TintList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1AlphaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1AlphaModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1AlphaOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1BlueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1BlueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1BlueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1CompList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GrayList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GreenList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GreenModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GreenOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1HueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1HueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1HueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1InvGammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1InvList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1LumList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1LumModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1LumOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1RedList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1RedModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1RedOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1SatList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1SatModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1SatOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1ShadeList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1TintList', + 
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1CompList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GrayList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvGammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1ShadeList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1TintList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaModList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaOffList', @@ -1064,14 +1081,21 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1ShadeList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1TintList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableGridImpl$1GridColList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableImpl$1TrList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableRowImpl$1TcList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableStyleListImpl$1TblStyleList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextBodyImpl$1PList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1BrList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1FldList', 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1RList', 
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextTabStopListImpl$1TabList', + 'org.openxmlformats.schemas.drawingml.x2006.main.STAdjAngle', + 'org.openxmlformats.schemas.drawingml.x2006.main.STBlipCompression', + 'org.openxmlformats.schemas.drawingml.x2006.main.STEffectContainerType', + 'org.openxmlformats.schemas.drawingml.x2006.main.STFixedAngle', + 'org.openxmlformats.schemas.drawingml.x2006.main.STPanose', + 'org.openxmlformats.schemas.drawingml.x2006.main.STPathFillMode', + 'org.openxmlformats.schemas.drawingml.x2006.main.STPresetMaterialType', + 'org.openxmlformats.schemas.drawingml.x2006.main.STShapeID', + 'org.openxmlformats.schemas.drawingml.x2006.main.STTextColumnCount', + 'org.openxmlformats.schemas.drawingml.x2006.main.STTextNonNegativePoint', 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1AbsoluteAnchorList', 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1OneCellAnchorList', 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1TwoCellAnchorList', @@ -1166,9 +1190,10 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionList', 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionListModify', 'org.openxmlformats.schemas.presentationml.x2006.main.CTHandoutMasterIdList', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTHeaderFooter', 'org.openxmlformats.schemas.presentationml.x2006.main.CTKinsoku', 'org.openxmlformats.schemas.presentationml.x2006.main.CTModifyVerifier', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTOleObjectEmbed', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTOleObjectLink', 'org.openxmlformats.schemas.presentationml.x2006.main.CTPhotoAlbum', 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideLayoutIdList', 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTiming', @@ -1177,7 +1202,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.presentationml.x2006.main.STBookmarkIdSeed', 'org.openxmlformats.schemas.presentationml.x2006.main.STDirection', 'org.openxmlformats.schemas.presentationml.x2006.main.STIndex', - 'org.openxmlformats.schemas.presentationml.x2006.main.STPlaceholderSize', 'org.openxmlformats.schemas.presentationml.x2006.main.STSlideSizeType', 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentAuthorListImpl$1CmAuthorList', 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentListImpl$1CmList', @@ -1228,7 +1252,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleLink', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleSize', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPCDKPIs', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPhoneticRun', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotFilters', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotHierarchies', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotSelection', @@ -1337,14 +1360,12 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowFieldsImpl$1FieldList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowImpl$1CList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RPhList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1BList', 
'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1DList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1EList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1MList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1NList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1SList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetDataImpl$1RowList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1PivotSelectionList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1SelectionList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewsImpl$1SheetViewList', @@ -1358,7 +1379,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ConditionalFormattingList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAltChunk', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAttr', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTBackground', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCaptions', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCellMergeTrackChange', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCharacterSpacing', @@ -1379,7 +1399,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEastAsianLayout', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnDocProps', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnProps', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEm', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFDDList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFHelpText', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFName', @@ -1422,10 +1441,8 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectType', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShapeDefaults', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShortHexNumber', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSignedTwipsMeasure', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSmartTagType', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblGridChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblLayoutType', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblOverlap', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPPr', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrChange', @@ -1435,7 +1452,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcPrChange', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextDirection', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextEffect', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextScale', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextboxTightWrap', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrPrChange', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangeNumbering', @@ -1446,7 +1462,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWritingStyle', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDateTime', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDisplacedByCustomXml', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHeightRule', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHint', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabAlignment', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabLeader', @@ -1454,7 +1469,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STProofErr', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STRubyAlign', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STShortHexNumber', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STThemeColor', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STUcharHexNumber', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STZoom', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTAbstractNumImpl$1LvlList', @@ -1521,7 +1535,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentsImpl$1CommentList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1AnchorList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1InlineList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTEndnotesImpl$1EndnoteList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1CalcOnExitList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1DdListList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EnabledList', @@ -1531,7 +1544,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1NameList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1StatusTextList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1TextInputList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFootnotesImpl$1FootnoteList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1AltChunkList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkEndList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkStartList', @@ -1670,11 +1682,7 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelInstrTextList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelTextList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DrawingList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteRefList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteReferenceList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FldCharList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteRefList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteReferenceList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1InstrTextList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1LastRenderedPageBreakList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthLongList', @@ -1719,7 +1727,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermStartList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1ProofErrList', 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1TcList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1BookmarkEndList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1BookmarkStartList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CommentRangeEndList', @@ -2007,7 +2014,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermStartList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1ProofErrList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1TrList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1AltChunkList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkEndList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkStartList', @@ -2032,7 +2038,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeStartList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermEndList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermStartList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1ProofErrList', @@ -2092,6 +2097,7 @@ thirdPartyAudit.excludes = [ 'org.sqlite.SQLiteConfig', 'org.w3.x2000.x09.xmldsig.KeyInfoType', 'org.w3.x2000.x09.xmldsig.SignatureMethodType', + 'org.w3.x2000.x09.xmldsig.SignatureValueType', 'org.w3.x2000.x09.xmldsig.TransformsType', 'org.w3.x2000.x09.xmldsig.impl.SignatureTypeImpl$1ObjectList', 'org.w3.x2000.x09.xmldsig.impl.SignedInfoTypeImpl$1ReferenceList', diff --git a/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.1.jar.sha1 b/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.1.jar.sha1 deleted file mode 100644 index 6ae3e58d22b..00000000000 --- a/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c62dfe18a3b827a2c626ade0ffba44562ddf3f61 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.2.jar.sha1 b/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.2.jar.sha1 new file mode 100644 index 00000000000..da79885de91 --- /dev/null +++ b/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.2.jar.sha1 @@ -0,0 +1 @@ +94919d81969c67c5894646338bf10fbc35f5a946 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.1.jar.sha1 b/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.1.jar.sha1 deleted file mode 100644 index 408dfe12ef2..00000000000 --- a/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f2d653c617004193f3350330d907f77b60c88c56 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.2.jar.sha1 b/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.2.jar.sha1 new file mode 100644 index 00000000000..f88f3fa3f37 --- /dev/null +++ 
b/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.2.jar.sha1 @@ -0,0 +1 @@ +32c9a9afe84eca86a3b0b3c66a956ced249ceade \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 deleted file mode 100644 index 93be07c90a4..00000000000 --- a/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b5cdabadb4cf12f5ee0f801399e70635583193f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.18.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.18.jar.sha1 new file mode 100644 index 00000000000..b4d4d68d9dc --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-compress-1.18.jar.sha1 @@ -0,0 +1 @@ +1191f9f2bc0c47a8cce69193feb1ff0a8bcb37d5 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/fontbox-2.0.12.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.12.jar.sha1 new file mode 100644 index 00000000000..d342b59edfb --- /dev/null +++ b/plugins/ingest-attachment/licenses/fontbox-2.0.12.jar.sha1 @@ -0,0 +1 @@ +566fd1d6b25012bb82078da08b82e6d0ba8c884a \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 deleted file mode 100644 index 4ded3b54888..00000000000 --- a/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f961f17ebdbc307e9055e3cf7c0e207f0895ae55 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/jempbox-1.8.13.jar.sha1 b/plugins/ingest-attachment/licenses/jempbox-1.8.13.jar.sha1 deleted file mode 100644 index 2593719dfb3..00000000000 --- a/plugins/ingest-attachment/licenses/jempbox-1.8.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a874cef0ed0e2a8c4cc5ed52c23ba3e6d78eca4e \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/jempbox-1.8.16.jar.sha1 b/plugins/ingest-attachment/licenses/jempbox-1.8.16.jar.sha1 new file mode 100644 index 00000000000..aba5a49037c --- /dev/null +++ b/plugins/ingest-attachment/licenses/jempbox-1.8.16.jar.sha1 @@ -0,0 +1 @@ +1f41de81768ef84ca2d8cda4cb79e9272c8ee966 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.12.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.12.jar.sha1 new file mode 100644 index 00000000000..e297ab7f91c --- /dev/null +++ b/plugins/ingest-attachment/licenses/pdfbox-2.0.12.jar.sha1 @@ -0,0 +1 @@ +a7311cd267c19e1ba8154b076a63d29537154784 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 deleted file mode 100644 index 9bf91e07976..00000000000 --- a/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d0425578218624388f2ec84a0b3a11efd55df0f5 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-3.17.jar.sha1 b/plugins/ingest-attachment/licenses/poi-3.17.jar.sha1 deleted file mode 100644 index bd472c0bec7..00000000000 --- a/plugins/ingest-attachment/licenses/poi-3.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0ae92292a2043888b40d418da97dc0b669fde326 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-4.0.0.jar.sha1 b/plugins/ingest-attachment/licenses/poi-4.0.0.jar.sha1 new file mode 100644 index 00000000000..baab2728481 --- /dev/null +++ 
b/plugins/ingest-attachment/licenses/poi-4.0.0.jar.sha1 @@ -0,0 +1 @@ +7ddb9b983ed682c93a986e8bb596d5935b13086c \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-3.17.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-3.17.jar.sha1 deleted file mode 100644 index 37c5e068814..00000000000 --- a/plugins/ingest-attachment/licenses/poi-ooxml-3.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -07d8c44407178b73246462842bf1e206e99c8e0a \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-4.0.0.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-4.0.0.jar.sha1 new file mode 100644 index 00000000000..1baa4d062de --- /dev/null +++ b/plugins/ingest-attachment/licenses/poi-ooxml-4.0.0.jar.sha1 @@ -0,0 +1 @@ +f3fa9c2bd64eb3ec15378de960a07d077ae5b26d \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-schemas-3.17.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-schemas-3.17.jar.sha1 deleted file mode 100644 index 744e323e5d7..00000000000 --- a/plugins/ingest-attachment/licenses/poi-ooxml-schemas-3.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -890114bfa82f5b6380ea0e9b0bf49b0af797b414 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-schemas-4.0.0.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-schemas-4.0.0.jar.sha1 new file mode 100644 index 00000000000..5d2e3c79308 --- /dev/null +++ b/plugins/ingest-attachment/licenses/poi-ooxml-schemas-4.0.0.jar.sha1 @@ -0,0 +1 @@ +125f9ccd2cf652fa4169b1c30e9023362e23324f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-scratchpad-3.17.jar.sha1 b/plugins/ingest-attachment/licenses/poi-scratchpad-3.17.jar.sha1 deleted file mode 100644 index 16686b3e89b..00000000000 --- a/plugins/ingest-attachment/licenses/poi-scratchpad-3.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -85d86a0e26c7f5c0db4ee63e8c7728e51c5d64ce \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-scratchpad-4.0.0.jar.sha1 b/plugins/ingest-attachment/licenses/poi-scratchpad-4.0.0.jar.sha1 new file mode 100644 index 00000000000..7cd201c3c8f --- /dev/null +++ b/plugins/ingest-attachment/licenses/poi-scratchpad-4.0.0.jar.sha1 @@ -0,0 +1 @@ +1038d3bb1ec34e93c184b4c5b690e2f51c6f7a60 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 deleted file mode 100644 index ef162f03439..00000000000 --- a/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -69556697de96cf0b22df846e970dafd29866eee0 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.19.1.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.19.1.jar.sha1 new file mode 100644 index 00000000000..0145026a76e --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-core-1.19.1.jar.sha1 @@ -0,0 +1 @@ +c1f075aa01586c2c28a249ad60bcfb733b69b866 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 deleted file mode 100644 index 6441e8b64e7..00000000000 --- a/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7d9b6dea91d783165f3313d320d3aaaa9a4dfc13 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.19.1.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.19.1.jar.sha1 new file mode 
100644 index 00000000000..a3a804cb87c --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-parsers-1.19.1.jar.sha1 @@ -0,0 +1 @@ +06d45a8683a7479f0e0d9d252f834d0ae44abd6b \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/xmlbeans-2.6.0.jar.sha1 b/plugins/ingest-attachment/licenses/xmlbeans-2.6.0.jar.sha1 deleted file mode 100644 index d27c56f66cb..00000000000 --- a/plugins/ingest-attachment/licenses/xmlbeans-2.6.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -29e80d2dd51f9dcdef8f9ffaee0d4dc1c9bbfc87 diff --git a/plugins/ingest-attachment/licenses/xmlbeans-3.0.1.jar.sha1 b/plugins/ingest-attachment/licenses/xmlbeans-3.0.1.jar.sha1 new file mode 100644 index 00000000000..e1c74c67f21 --- /dev/null +++ b/plugins/ingest-attachment/licenses/xmlbeans-3.0.1.jar.sha1 @@ -0,0 +1 @@ +50d94da791ab1e799a11d6f82410fd7d49f402ca \ No newline at end of file diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 80ad1fbca87..654bc361f53 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.attachment; import org.apache.commons.io.IOUtils; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; @@ -297,7 +296,6 @@ public class AttachmentProcessorTests extends ESTestCase { } public void testIndexedChars() throws Exception { - assumeFalse("https://github.com/elastic/elasticsearch/issues/31305", JavaVersion.current().equals(JavaVersion.parse("11"))); processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 19, false, null); diff --git a/plugins/ingest-user-agent/src/main/resources/regexes.yml b/plugins/ingest-user-agent/src/main/resources/regexes.yml index 6c3369dc2f7..d9c16403cb8 100644 --- a/plugins/ingest-user-agent/src/main/resources/regexes.yml +++ b/plugins/ingest-user-agent/src/main/resources/regexes.yml @@ -748,7 +748,7 @@ os_parsers: # possibility of false positive when different marketing names share same NT kernel # e.g. windows server 2003 and windows xp # lots of ua strings have Windows NT 4.1 !?!?!?!? !?!? !? !????!?! !!! ??? !?!?! ? - # (very) roughly ordered in terms of frequency of occurence of regex (win xp currently most frequent, etc) + # (very) roughly ordered in terms of frequency of occurrence of regex (win xp currently most frequent, etc) ########## # ie mobile desktop mode @@ -2848,7 +2848,7 @@ device_parsers: device_replacement: 'Micromax $1' brand_replacement: 'Micromax' model_replacement: '$1' - # be carefull here with Acer e.g. A500 + # be careful here with Acer e.g. 
A500 - regex: '; *(A\d{2}|A[12]\d{2}|A90S|A110Q) Build' regex_flag: 'i' device_replacement: 'Micromax $1' diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index d6d1793066a..97ce4330fb5 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -29,13 +29,13 @@ dependencies { compile "org.elasticsearch:elasticsearch-nio:${version}" // network stack - compile "io.netty:netty-buffer:4.1.29.Final" - compile "io.netty:netty-codec:4.1.29.Final" - compile "io.netty:netty-codec-http:4.1.29.Final" - compile "io.netty:netty-common:4.1.29.Final" - compile "io.netty:netty-handler:4.1.29.Final" - compile "io.netty:netty-resolver:4.1.29.Final" - compile "io.netty:netty-transport:4.1.29.Final" + compile "io.netty:netty-buffer:${versions.netty}" + compile "io.netty:netty-codec:${versions.netty}" + compile "io.netty:netty-codec-http:${versions.netty}" + compile "io.netty:netty-common:${versions.netty}" + compile "io.netty:netty-handler:${versions.netty}" + compile "io.netty:netty-resolver:${versions.netty}" + compile "io.netty:netty-transport:${versions.netty}" } dependencyLicenses { @@ -90,6 +90,7 @@ thirdPartyAudit.excludes = [ // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional 'org.slf4j.Logger', 'org.slf4j.LoggerFactory', + 'org.slf4j.spi.LocationAwareLogger', 'com.google.protobuf.ExtensionRegistryLite', 'com.google.protobuf.MessageLiteOrBuilder', @@ -124,6 +125,7 @@ thirdPartyAudit.excludes = [ 'io.netty.util.internal.PlatformDependent0$1', 'io.netty.util.internal.PlatformDependent0$2', 'io.netty.util.internal.PlatformDependent0$3', + 'io.netty.util.internal.PlatformDependent0$5', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields', diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.29.Final.jar.sha1 deleted file mode 100644 index 17798a82aa7..00000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c3809f72e4b535b343b7dfa3c0c8210dad2fa5ea \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.30.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..070bb3f8332 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +597adb653306470fb3ec1af3c0f3f30a37b1310a \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.29.Final.jar.sha1 deleted file mode 100644 index f892420795b..00000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1651bc2e279216773c234cafe402d68d2a5adc90 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.30.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..cd0786f3e9f --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +515c8f609aaca28a94f984d89a9667dd3359c1b1 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.29.Final.jar.sha1 
b/plugins/transport-nio/licenses/netty-codec-http-4.1.29.Final.jar.sha1 deleted file mode 100644 index aa97345bad1..00000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -454688b88cea27a4d407202d1fc79a6522345b5e \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.30.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..e795cce1ba0 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +1384c630e8a0eeef33ad12a28791dce6e1d8767c \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.29.Final.jar.sha1 deleted file mode 100644 index 47140876e6a..00000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a5d6a735ed07d8f197daa48db7f097cfc971ee5e \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.30.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..079e35ecc4c --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +5dca0c34d8f38af51a2398614e81888f51cf811a \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.29.Final.jar.sha1 deleted file mode 100644 index 7c2d407f75e..00000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1acf1d94799296a2517533ec75ce7e155e9c4ea7 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.30.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..50b87cdac13 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +ecc076332ed103411347f4806a44ee32d9d9cb5f \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.29.Final.jar.sha1 deleted file mode 100644 index bac08f57079..00000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bbec1dc913732e4773893c14d795b15d6c1e878e \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.30.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..2ef4d895143 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +5106fd687066ffd712e5295d32af4e2ac6482613 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.29.Final.jar.sha1 deleted file mode 100644 index 0ce64132afb..00000000000 --- a/plugins/transport-nio/licenses/netty-transport-4.1.29.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c190b90f70e2ae8a48c068afad709e8728fcaa39 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.30.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.30.Final.jar.sha1 new file mode 100644 index 00000000000..e4febd661cb --- /dev/null +++ 
b/plugins/transport-nio/licenses/netty-transport-4.1.30.Final.jar.sha1 @@ -0,0 +1 @@ +3d27bb432a3b125167ac161b26415ad29ec17f02 \ No newline at end of file diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 7efebd1d54a..6df1854cc22 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -777,33 +777,34 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { assertTrue("expected to find a primary but didn't\n" + recoveryResponse, foundPrimary); assertEquals("mismatch while checking for translog recovery\n" + recoveryResponse, shouldHaveTranslog, restoredFromTranslog); - String currentLuceneVersion = Version.CURRENT.luceneVersion.toString(); - String bwcLuceneVersion = getOldClusterVersion().luceneVersion.toString(); - if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) { - int numCurrentVersion = 0; - int numBwcVersion = 0; - Request segmentsRequest = new Request("GET", "/_cat/segments/" + index); - segmentsRequest.addParameter("h", "prirep,shard,index,version"); - segmentsRequest.addParameter("s", "prirep,shard,index"); - String segmentsResponse = toStr(client().performRequest(segmentsRequest)); - for (String line : segmentsResponse.split("\n")) { - if (false == line.startsWith("p")) { - continue; - } - Matcher m = Pattern.compile("(\\d+\\.\\d+\\.\\d+)$").matcher(line); - assertTrue(line, m.find()); - String version = m.group(1); - if (currentLuceneVersion.equals(version)) { - numCurrentVersion++; - } else if (bwcLuceneVersion.equals(version)) { - numBwcVersion++; - } else { - fail("expected version to be one of [" + currentLuceneVersion + "," + bwcLuceneVersion + "] but was " + line); + String currentLuceneVersion = Version.CURRENT.luceneVersion.toString(); + String bwcLuceneVersion = getOldClusterVersion().luceneVersion.toString(); + if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) { + int numCurrentVersion = 0; + int numBwcVersion = 0; + Request segmentsRequest = new Request("GET", "/_cat/segments/" + index); + segmentsRequest.addParameter("h", "prirep,shard,index,version"); + segmentsRequest.addParameter("s", "prirep,shard,index"); + String segmentsResponse = toStr(client().performRequest(segmentsRequest)); + for (String line : segmentsResponse.split("\n")) { + if (false == line.startsWith("p")) { + continue; + } + Matcher m = Pattern.compile("(\\d+\\.\\d+\\.\\d+)$").matcher(line); + assertTrue(line, m.find()); + String version = m.group(1); + if (currentLuceneVersion.equals(version)) { + numCurrentVersion++; + } else if (bwcLuceneVersion.equals(version)) { + numBwcVersion++; + } else { + fail("expected version to be one of [" + currentLuceneVersion + "," + bwcLuceneVersion + "] but was " + line); + } } + assertNotEquals("expected at least 1 current segment after translog recovery. segments:\n" + segmentsResponse, + 0, numCurrentVersion); + assertNotEquals("expected at least 1 old segment. segments:\n" + segmentsResponse, 0, numBwcVersion); } - assertNotEquals("expected at least 1 current segment after translog recovery. segments:\n" + segmentsResponse, - 0, numCurrentVersion); - assertNotEquals("expected at least 1 old segment. 
segments:\n" + segmentsResponse, 0, numBwcVersion);} } } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml index 04d85eb6078..9b899fe800c 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml @@ -1,5 +1,5 @@ --- -"Create things in the cluster state that we'll validate are there after the ugprade": +"Create things in the cluster state that we'll validate are there after the upgrade": - do: snapshot.create_repository: repository: my_repo diff --git a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats index a7628d08bba..db062eb337e 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats @@ -190,7 +190,7 @@ setup() { @test "[SYSTEMD] start Elasticsearch with custom JVM options" { assert_file_exist $ESENVFILE # The custom config directory is not under /tmp or /var/tmp because - # systemd's private temp directory functionaly means different + # systemd's private temp directory functionally means different # processes can have different views of what's in these directories local temp=`mktemp -p /etc -d` cp "$ESCONFIG"/elasticsearch.yml "$temp" diff --git a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash index 9a1ff6f2e23..d4ef82c4a18 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash @@ -97,7 +97,7 @@ fi rm -rf "$ESPLUGINS" # The custom plugins directory is not under /tmp or /var/tmp because - # systemd's private temp directory functionaly means different + # systemd's private temp directory functionally means different # processes can have different views of what's in these directories local es_plugins=$(mktemp -p /var -d -t 'plugins.XXXX') chown -R elasticsearch:elasticsearch "$es_plugins" @@ -225,10 +225,10 @@ fi } @test "[$GROUP] install ingest-attachment plugin" { - # we specify the version on the poi-3.17.jar so that the test does + # we specify the version on the poi-4.0.0.jar so that the test does # not spuriously pass if the jar is missing but the other poi jars # are present - install_and_check_plugin ingest attachment bcprov-jdk15on-*.jar tika-core-*.jar pdfbox-*.jar poi-3.17.jar poi-ooxml-3.17.jar poi-ooxml-schemas-*.jar poi-scratchpad-*.jar + install_and_check_plugin ingest attachment bcprov-jdk15on-*.jar tika-core-*.jar pdfbox-*.jar poi-4.0.0.jar poi-ooxml-4.0.0.jar poi-ooxml-schemas-*.jar poi-scratchpad-*.jar } @test "[$GROUP] install ingest-geoip plugin" { diff --git a/qa/vagrant/src/test/resources/packaging/utils/utils.bash b/qa/vagrant/src/test/resources/packaging/utils/utils.bash index cb71e9e6ec1..f5a9f25df16 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/utils.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/utils.bash @@ -556,7 +556,7 @@ run_elasticsearch_tests() { move_config() { local oldConfig="$ESCONFIG" # The custom config directory is not under /tmp or /var/tmp because - # systemd's private temp directory functionaly means different + # systemd's private temp directory functionally means different # 
processes can have different views of what's in these directories export ESCONFIG="${1:-$(mktemp -p /etc -d -t 'config.XXXX')}" echo "Moving configuration directory from $oldConfig to $ESCONFIG" diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index b3ec72d5270..cc1bb9bb17f 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -20,6 +20,7 @@ package org.elasticsearch.action; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.allocation.TransportClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; @@ -200,7 +201,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.NamedRegistry; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -336,7 +336,7 @@ import static java.util.Collections.unmodifiableMap; */ public class ActionModule extends AbstractModule { - private static final Logger logger = ESLoggerFactory.getLogger(ActionModule.class); + private static final Logger logger = LogManager.getLogger(ActionModule.class); private final boolean transportClient; private final Settings settings; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 9c134ba4012..e6eab5e8914 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.bulk; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.DocWriteRequest; @@ -48,7 +49,6 @@ import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; @@ -80,7 +80,7 @@ public class TransportShardBulkAction extends TransportWriteAction { /** * Returns an iterator over active and initializing shards, ordered by the adaptive replica - * selection forumla. Making sure though that its random within the active shards of the same + * selection formula. Making sure though that its random within the active shards of the same * (or missing) rank, and initializing shards are the last to iterate through. 
*/ public ShardIterator activeInitializingShardsRankedIt(@Nullable ResponseCollectorService collector, diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 6388853d00b..20f52a742a2 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -26,17 +26,17 @@ import org.apache.logging.log4j.spi.ExtendedLogger; /** * Factory to get {@link Logger}s */ -public final class ESLoggerFactory { +final class ESLoggerFactory { private ESLoggerFactory() { } - public static Logger getLogger(String prefix, String name) { + static Logger getLogger(String prefix, String name) { return getLogger(prefix, LogManager.getLogger(name)); } - public static Logger getLogger(String prefix, Class clazz) { + static Logger getLogger(String prefix, Class clazz) { /* * At one point we didn't use LogManager.getLogger(clazz) because * of a bug in log4j that has since been fixed: @@ -49,7 +49,7 @@ public final class ESLoggerFactory { return getLogger(prefix, LogManager.getLogger(clazz.getName())); } - public static Logger getLogger(String prefix, Logger logger) { + static Logger getLogger(String prefix, Logger logger) { /* * In a followup we'll throw an exception if prefix is null or empty * redirecting folks to LogManager.getLogger. @@ -61,13 +61,4 @@ public final class ESLoggerFactory { } return new PrefixLogger((ExtendedLogger)logger, logger.getName(), prefix); } - - /** - * Get or build a logger. - * @deprecated Prefer {@link LogManager#getLogger} - */ - @Deprecated - public static Logger getLogger(Class clazz) { - return getLogger(null, clazz); - } } diff --git a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java index a772867c084..e5031f4608e 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -28,7 +28,6 @@ import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.core.config.LoggerConfig; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -65,16 +64,6 @@ public class Loggers { return getLogger(clazz, asArrayList(Loggers.SPACE, index.getName(), prefixes).toArray(new String[0])); } - /** - * Get a logger. - * @deprecated prefer {@link #getLogger(Class, String...)} or {@link LogManager#getLogger} - * as the Settings is no longer needed - */ - @Deprecated - public static Logger getLogger(Class clazz, Settings settings, String... prefixes) { - return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz); - } - public static Logger getLogger(Class clazz, String... prefixes) { return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz); } @@ -102,7 +91,7 @@ public class Loggers { */ @Deprecated public static Logger getLogger(Class clazz) { - return ESLoggerFactory.getLogger(clazz); + return ESLoggerFactory.getLogger(null, clazz); } private static String formatPrefix(String... 
prefixes) { diff --git a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java index b2cb319071f..1e997cce23b 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java @@ -51,7 +51,7 @@ public interface DateMathParser { * * Examples are * - * 2014-11-18||-2y substracts two years from the input date + * 2014-11-18||-2y subtracts two years from the input date * now/m rounds the current time to minute granularity * * Supported rounding units are diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java index d50f57aaafa..4d6bd51c5c3 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.common.collect.Tuple; import java.util.ArrayList; @@ -47,7 +48,7 @@ public final class ListenableFuture extends BaseFuture implements ActionLi * If the future has completed, the listener will be notified immediately without forking to * a different thread. */ - public void addListener(ActionListener listener, ExecutorService executor) { + public void addListener(ActionListener listener, ExecutorService executor, ThreadContext threadContext) { if (done) { // run the callback directly, we don't hold the lock and don't need to fork! notifyListener(listener, EsExecutors.newDirectExecutorService()); @@ -59,7 +60,7 @@ public final class ListenableFuture extends BaseFuture implements ActionLi if (done) { run = true; } else { - listeners.add(new Tuple<>(listener, executor)); + listeners.add(new Tuple<>(ContextPreservingActionListener.wrapPreservingContext(listener, threadContext), executor)); run = false; } } diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java index 8ebd79dd4b3..75c085d3bdd 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java @@ -20,9 +20,9 @@ package org.elasticsearch.common.util.concurrent; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.ExponentiallyWeightedMovingAverage; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.TimeValue; import java.util.Locale; @@ -41,8 +41,7 @@ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto // This is a random starting point alpha. 
TODO: revisit this with actual testing and/or make it configurable public static double EWMA_ALPHA = 0.3; - private static final Logger logger = - ESLoggerFactory.getLogger(QueueResizingEsThreadPoolExecutor.class); + private static final Logger logger = LogManager.getLogger(QueueResizingEsThreadPoolExecutor.class); // The amount the queue size is adjusted by for each calcuation private static final int QUEUE_ADJUSTMENT_AMOUNT = 50; diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 490f3d680e4..f2b1c209cd9 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -18,12 +18,13 @@ */ package org.elasticsearch.common.util.concurrent; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.CloseableThreadLocal; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -83,6 +84,7 @@ public final class ThreadContext implements Closeable, Writeable { public static final String PREFIX = "request.headers"; public static final Setting DEFAULT_HEADERS_SETTING = Setting.groupSetting(PREFIX + ".", Property.NodeScope); + private static final Logger logger = LogManager.getLogger(ThreadContext.class); private static final ThreadContextStruct DEFAULT_CONTEXT = new ThreadContextStruct(); private final Map defaultHeader; private final ContextThreadLocal threadLocal; @@ -469,18 +471,16 @@ public final class ThreadContext implements Closeable, Writeable { //check if we can add another warning header - if max size within limits if (key.equals("Warning") && (maxWarningHeaderSize != -1)) { //if size is NOT unbounded, check its limits if (warningHeadersSize > maxWarningHeaderSize) { // if max size has already been reached before - final String message = "Dropping a warning header, as their total size reached the maximum allowed of [" + - maxWarningHeaderSize + "] bytes set in [" + - HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE.getKey() + "]!"; - ESLoggerFactory.getLogger(ThreadContext.class).warn(message); + logger.warn("Dropping a warning header, as their total size reached the maximum allowed of [" + + maxWarningHeaderSize + "] bytes set in [" + + HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE.getKey() + "]!"); return this; } newWarningHeaderSize += "Warning".getBytes(StandardCharsets.UTF_8).length + value.getBytes(StandardCharsets.UTF_8).length; if (newWarningHeaderSize > maxWarningHeaderSize) { - final String message = "Dropping a warning header, as their total size reached the maximum allowed of [" + - maxWarningHeaderSize + "] bytes set in [" + - HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE.getKey() + "]!"; - ESLoggerFactory.getLogger(ThreadContext.class).warn(message); + logger.warn("Dropping a warning header, as their total size reached the maximum allowed of [" + + maxWarningHeaderSize + "] bytes set in [" + + HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE.getKey() + "]!"); 
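// Note on the step below: the warning header itself is dropped here, but the
// returned struct still records newWarningHeaderSize, so the next attempt to
// add a warning header trips the fast-path size check above instead of
// re-measuring.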
return new ThreadContextStruct(requestHeaders, responseHeaders, transientHeaders, isSystemContext, newWarningHeaderSize); } } @@ -505,9 +505,9 @@ public final class ThreadContext implements Closeable, Writeable { if ((key.equals("Warning")) && (maxWarningHeaderCount != -1)) { //if count is NOT unbounded, check its limits final int warningHeaderCount = newResponseHeaders.containsKey("Warning") ? newResponseHeaders.get("Warning").size() : 0; if (warningHeaderCount > maxWarningHeaderCount) { - final String message = "Dropping a warning header, as their total count reached the maximum allowed of [" + - maxWarningHeaderCount + "] set in [" + HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_COUNT.getKey() + "]!"; - ESLoggerFactory.getLogger(ThreadContext.class).warn(message); + logger.warn("Dropping a warning header, as their total count reached the maximum allowed of [" + + maxWarningHeaderCount + "] set in [" + + HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_COUNT.getKey() + "]!"); return this; } } @@ -641,7 +641,7 @@ public final class ThreadContext implements Closeable, Writeable { assert e instanceof CancellationException || e instanceof InterruptedException || e instanceof ExecutionException : e; - final Optional maybeError = ExceptionsHelper.maybeError(e, ESLoggerFactory.getLogger(ThreadContext.class)); + final Optional maybeError = ExceptionsHelper.maybeError(e, logger); if (maybeError.isPresent()) { // throw this error where it will propagate to the uncaught exception handler throw maybeError.get(); diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 7d490be4af5..ff8baaabb44 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -82,6 +82,7 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; import static java.util.Collections.unmodifiableSet; @@ -818,13 +819,21 @@ public final class NodeEnvironment implements Closeable { * Returns all folder names in ${data.paths}/nodes/{node.id}/indices folder */ public Set availableIndexFolders() throws IOException { + return availableIndexFolders(p -> false); + } + + /** + * Returns folder names in ${data.paths}/nodes/{node.id}/indices folder that don't match the given predicate. + * @param excludeIndexPathIdsPredicate folder names to exclude + */ + public Set availableIndexFolders(Predicate excludeIndexPathIdsPredicate) throws IOException { if (nodePaths == null || locks == null) { throw new IllegalStateException("node is not configured to store local location"); } assertEnvIsLocked(); Set indexFolders = new HashSet<>(); for (NodePath nodePath : nodePaths) { - indexFolders.addAll(availableIndexFoldersForPath(nodePath)); + indexFolders.addAll(availableIndexFoldersForPath(nodePath, excludeIndexPathIdsPredicate)); } return indexFolders; @@ -838,6 +847,19 @@ public final class NodeEnvironment implements Closeable { * @throws IOException if an I/O exception occurs traversing the filesystem */ public Set availableIndexFoldersForPath(final NodePath nodePath) throws IOException { + return availableIndexFoldersForPath(nodePath, p -> false); + } + + /** + * Return directory names in the nodes/{node.id}/indices directory for the given node path that don't match the given predicate. 
+ * + * @param nodePath the path + * @param excludeIndexPathIdsPredicate folder names to exclude + * @return all directories that could be indices for the given node path. + * @throws IOException if an I/O exception occurs traversing the filesystem + */ + public Set availableIndexFoldersForPath(final NodePath nodePath, Predicate excludeIndexPathIdsPredicate) + throws IOException { if (nodePaths == null || locks == null) { throw new IllegalStateException("node is not configured to store local location"); } @@ -847,8 +869,9 @@ public final class NodeEnvironment implements Closeable { if (Files.isDirectory(indicesLocation)) { try (DirectoryStream stream = Files.newDirectoryStream(indicesLocation)) { for (Path index : stream) { - if (Files.isDirectory(index)) { - indexFolders.add(index.getFileName().toString()); + final String fileName = index.getFileName().toString(); + if (excludeIndexPathIdsPredicate.test(fileName) == false && Files.isDirectory(index)) { + indexFolders.add(fileName); } } } diff --git a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java index fd1698bb006..9377247488e 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java @@ -85,10 +85,9 @@ public class MetaStateService extends AbstractComponent { */ List loadIndicesStates(Predicate excludeIndexPathIdsPredicate) throws IOException { List indexMetaDataList = new ArrayList<>(); - for (String indexFolderName : nodeEnv.availableIndexFolders()) { - if (excludeIndexPathIdsPredicate.test(indexFolderName)) { - continue; - } + for (String indexFolderName : nodeEnv.availableIndexFolders(excludeIndexPathIdsPredicate)) { + assert excludeIndexPathIdsPredicate.test(indexFolderName) == false : + "unexpected folder " + indexFolderName + " which should have been excluded"; IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, namedXContentRegistry, nodeEnv.resolveIndexFolder(indexFolderName)); if (indexMetaData != null) { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java index 84eb0c4c349..a3fddce3e06 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java @@ -41,7 +41,7 @@ public class PreConfiguredCharFilter extends PreConfiguredAnalysisComponent create) { diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 187b0eb1359..55d93203abe 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -1086,7 +1086,7 @@ public class InternalEngine extends Engine { return new IndexingStrategy(true, false, true, false, seqNoForIndexing, versionForIndexing, null); } - static IndexingStrategy skipDueToVersionConflict( + public static IndexingStrategy skipDueToVersionConflict( VersionConflictEngineException e, boolean currentNotFoundOrDeleted, long currentVersion, long term) { final IndexResult result = new IndexResult(e, currentVersion, term); return new IndexingStrategy( @@ -1343,7 +1343,7 @@ public class InternalEngine extends Engine { Optional.empty() : 
Optional.of(earlyResultOnPreflightError); } - static DeletionStrategy skipDueToVersionConflict( + public static DeletionStrategy skipDueToVersionConflict( VersionConflictEngineException e, long currentVersion, long term, boolean currentlyDeleted) { final long unassignedSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; final DeleteResult deleteResult = new DeleteResult(e, currentVersion, term, unassignedSeqNo, currentlyDeleted == false); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index 6896432bcdd..642270113cf 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.NestedSortBuilder; import java.io.IOException; @@ -129,10 +130,12 @@ public interface IndexFieldData extends IndexCompone private final BitSetProducer rootFilter; private final Query innerQuery; + private final NestedSortBuilder nestedSort; - public Nested(BitSetProducer rootFilter, Query innerQuery) { + public Nested(BitSetProducer rootFilter, Query innerQuery, NestedSortBuilder nestedSort) { this.rootFilter = rootFilter; this.innerQuery = innerQuery; + this.nestedSort = nestedSort; } public Query getInnerQuery() { @@ -143,6 +146,8 @@ public interface IndexFieldData extends IndexCompone return rootFilter; } + public NestedSortBuilder getNestedSort() { return nestedSort; } + /** * Get a {@link BitDocIdSet} that matches the root documents. */ diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 8e0a31859a1..859848df49e 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -91,7 +91,9 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, rootDocs, innerDocs); + final int maxChildren = nested.getNestedSort() != null ? + nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, rootDocs, innerDocs, maxChildren); } if (sortMissingFirst(missingValue) || sortMissingLast(missingValue)) { return selectedValues; @@ -119,7 +121,8 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, missingBytes, rootDocs, innerDocs, context.reader().maxDoc()); + final int maxChildren = nested.getNestedSort() != null ? 
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, missingBytes, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); } return selectedValues; } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index 1ae3fb692ec..0a273d88380 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -76,7 +76,8 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc()); + final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); } return selectedValues.getRawDoubleValues(); } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index b271dd54bd7..beb27644a1b 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -68,7 +68,8 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc()); + final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); } return selectedValues.getRawFloatValues(); } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index 362dde60996..f323709e8f5 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -67,7 +67,8 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc()); + final int maxChildren = nested.getNestedSort() != null ? 
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); } return selectedValues; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java index ce6e10ccc0f..e6ec6e446bf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java @@ -19,13 +19,13 @@ package org.elasticsearch.index.mapper; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -49,7 +49,7 @@ import java.util.Objects; public class FieldNamesFieldMapper extends MetadataFieldMapper { private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger( - ESLoggerFactory.getLogger(FieldNamesFieldMapper.class)); + LogManager.getLogger(FieldNamesFieldMapper.class)); public static final String NAME = "_field_names"; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index b4531f9c489..8d9a6887765 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -186,6 +186,11 @@ public class NumberFieldMapper extends FieldMapper { return result; } + @Override + public Number parsePoint(byte[] value) { + return HalfFloatPoint.decodeDimension(value, 0); + } + @Override public Float parse(XContentParser parser, boolean coerce) throws IOException { float parsed = parser.floatValue(coerce); @@ -278,6 +283,11 @@ public class NumberFieldMapper extends FieldMapper { return result; } + @Override + public Number parsePoint(byte[] value) { + return FloatPoint.decodeDimension(value, 0); + } + @Override public Float parse(XContentParser parser, boolean coerce) throws IOException { float parsed = parser.floatValue(coerce); @@ -359,6 +369,11 @@ public class NumberFieldMapper extends FieldMapper { return parsed; } + @Override + public Number parsePoint(byte[] value) { + return DoublePoint.decodeDimension(value, 0); + } + @Override public Double parse(XContentParser parser, boolean coerce) throws IOException { double parsed = parser.doubleValue(coerce); @@ -451,6 +466,11 @@ public class NumberFieldMapper extends FieldMapper { return (byte) doubleValue; } + @Override + public Number parsePoint(byte[] value) { + return INTEGER.parsePoint(value).byteValue(); + } + @Override public Short parse(XContentParser parser, boolean coerce) throws IOException { int value = parser.intValue(coerce); @@ -507,6 +527,11 @@ public class NumberFieldMapper extends FieldMapper { return (short) doubleValue; } + @Override + public Number parsePoint(byte[] value) { + return INTEGER.parsePoint(value).shortValue(); + } + @Override public Short parse(XContentParser parser, boolean coerce) throws IOException { return 
parser.shortValue(coerce); @@ -559,6 +584,11 @@ public class NumberFieldMapper extends FieldMapper { return (int) doubleValue; } + @Override + public Number parsePoint(byte[] value) { + return IntPoint.decodeDimension(value, 0); + } + @Override public Integer parse(XContentParser parser, boolean coerce) throws IOException { return parser.intValue(coerce); @@ -673,6 +703,11 @@ public class NumberFieldMapper extends FieldMapper { return Numbers.toLong(stringValue, coerce); } + @Override + public Number parsePoint(byte[] value) { + return LongPoint.decodeDimension(value, 0); + } + @Override public Long parse(XContentParser parser, boolean coerce) throws IOException { return parser.longValue(coerce); @@ -789,6 +824,7 @@ public class NumberFieldMapper extends FieldMapper { boolean hasDocValues); public abstract Number parse(XContentParser parser, boolean coerce) throws IOException; public abstract Number parse(Object value, boolean coerce); + public abstract Number parsePoint(byte[] value); public abstract List createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored); Number valueForSearch(Number value) { @@ -937,6 +973,10 @@ public class NumberFieldMapper extends FieldMapper { } } + public Number parsePoint(byte[] value) { + return type.parsePoint(value); + } + @Override public boolean equals(Object o) { if (super.equals(o) == false) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 792ac4ba03e..3e7bd121d0b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -27,7 +28,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.CopyOnWriteHashMap; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -45,7 +45,7 @@ import java.util.Locale; import java.util.Map; public class ObjectMapper extends Mapper implements Cloneable { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(ObjectMapper.class)); + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(ObjectMapper.class)); public static final String CONTENT_TYPE = "object"; public static final String NESTED_CONTENT_TYPE = "nested"; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java index 162ce2a3fde..d6d453dbb2b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; @@ -36,7 +37,6 @@ import 
org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -92,7 +92,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { static final class TypeFieldType extends StringFieldType { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(ESLoggerFactory.getLogger(TypeFieldType.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(TypeFieldType.class)); TypeFieldType() { } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java index b2e6f98f126..4aa9bc5ce14 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java @@ -150,7 +150,7 @@ public abstract class AbstractBulkByScrollRequest 0)) { e = addValidationError( diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java index 7aa2c8a1b75..8df6620dabc 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java @@ -319,6 +319,14 @@ public class BulkByScrollTask extends CancellableTask { } } + /** + * Status of the reindex, update by query, or delete by query. While in + * general we allow {@linkplain Task.Status} implementations to make + * backwards incompatible changes to their {@link Task.Status#toXContent} + * implementations, this one has become de facto standardized because Kibana + * parses it. As such, we should be very careful about removing things from + * this. + */ public static class Status implements Task.Status, SuccessfullyProcessed { public static final String NAME = "bulk-by-scroll"; diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 11d8f44bef1..17756630517 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -791,7 +791,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl + "]"; ensureWriteAllowed(origin); // When there is a single type, the unique identifier is only composed of the _id, - // so there is no way to differenciate foo#1 from bar#1. This is especially an issue + // so there is no way to differentiate foo#1 from bar#1. This is especially an issue // if a user first deletes foo#1 and then indexes bar#1: since we do not encode the // _type in the uid it might look like we are reindexing the same document, which // would fail if bar#1 is indexed with a lower version than foo#1 was deleted with. 
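Aside on the parsePoint overrides added above: each one simply delegates to the matching Lucene decodeDimension helper for that type's fixed-width packed byte[] form. A minimal round-trip sketch for the LONG case (the class name is invented for illustration; assumes lucene-core on the classpath):

    import org.apache.lucene.document.LongPoint;

    public class ParsePointRoundTrip {
        public static void main(String[] args) {
            // Encode a long into the packed form Lucene stores for point values...
            byte[] packed = new byte[Long.BYTES];
            LongPoint.encodeDimension(42L, packed, 0);
            // ...and decode it the same way the LONG mapper's parsePoint does.
            System.out.println(LongPoint.decodeDimension(packed, 0)); // prints 42
        }
    }
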
diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 13499dfd60c..18ccb988b8c 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -1047,7 +1047,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref } /** - * returns the history uuid the store points at, or null if not existant. + * returns the history uuid the store points at, or null if nonexistent. */ public String getHistoryUUID() { return commitUserData.get(Engine.HISTORY_UUID_KEY); } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java index 8dd5ddcee3b..1f877927455 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java @@ -85,12 +85,12 @@ public class TranslogStats implements Streamable, ToXContentFragment { return numberOfOperations; } - /** the size of the generations in the translog that weren't yet to comitted to lucene */ + /** the size of the generations in the translog that weren't yet committed to lucene */ public long getUncommittedSizeInBytes() { return uncommittedSizeInBytes; } - /** the number of operations in generations of the translog that weren't yet to comitted to lucene */ + /** the number of operations in generations of the translog that weren't yet committed to lucene */ public int getUncommittedOperations() { return uncommittedOperations; } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index da80a07abc1..626d6e8df17 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -47,6 +47,7 @@ import java.io.Closeable; import java.util.Collection; import java.util.Collections; import java.util.Iterator; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.function.Supplier; @@ -54,7 +55,7 @@ import java.util.function.Supplier; /** * The indices request cache allows to cache a shard level request stage responses, helping with improving * similar requests that are potentially expensive (because of aggs for example). The cache is fully coherent - * with the semantics of NRT (the index reader version is part of the cache key), and relies on size based + * with the semantics of NRT (the index reader cache key is part of the cache key), and relies on size based * eviction to evict old reader associated cache entries as well as scheduler reaper to clean readers that * are no longer used or closed shards. *

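Context for the IndicesRequestCache hunks that follow: the cache key moves from reader.getVersion() to Lucene's identity-based IndexReader.CacheKey, so a reopened reader can never alias a stale entry, and a null key can stand in for "clean everything". A small standalone sketch of that identity property (demo class and setup are hypothetical; assumes a Lucene 7.x-era classpath where RAMDirectory and the no-arg IndexWriterConfig constructor are available):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.RAMDirectory;

    public class ReaderCacheKeyDemo {
        public static void main(String[] args) throws Exception {
            try (RAMDirectory dir = new RAMDirectory();
                 IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
                writer.addDocument(new Document());
                writer.commit();
                DirectoryReader r1 = DirectoryReader.open(dir);
                IndexReader.CacheKey k1 = r1.getReaderCacheHelper().getKey();

                writer.addDocument(new Document());
                writer.commit();
                // The index changed, so openIfChanged returns a brand new reader...
                DirectoryReader r2 = DirectoryReader.openIfChanged(r1);
                // ...with a new identity-based key: entries cached under k1 can
                // never be served for r2, unlike a reused version number.
                System.out.println(k1 == r2.getReaderCacheHelper().getKey()); // false
                r1.close();
                r2.close();
            }
        }
    }
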
@@ -105,7 +106,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo } void clear(CacheEntity entity) { - keysToClean.add(new CleanupKey(entity, -1)); + keysToClean.add(new CleanupKey(entity, null)); cleanCache(); } @@ -119,7 +120,8 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo // removed when this issue is solved BytesReference getOrCompute(CacheEntity cacheEntity, Supplier loader, DirectoryReader reader, BytesReference cacheKey, Supplier cacheKeyRenderer) throws Exception { - final Key key = new Key(cacheEntity, reader.getVersion(), cacheKey); + assert reader.getReaderCacheHelper() != null; + final Key key = new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey); Loader cacheLoader = new Loader(cacheEntity, loader); BytesReference value = cache.computeIfAbsent(key, cacheLoader); if (cacheLoader.isLoaded()) { @@ -128,7 +130,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo logger.trace("Cache miss for reader version [{}] and request:\n {}", reader.getVersion(), cacheKeyRenderer.get()); } // see if its the first time we see this reader, and make sure to register a cleanup key - CleanupKey cleanupKey = new CleanupKey(cacheEntity, reader.getVersion()); + CleanupKey cleanupKey = new CleanupKey(cacheEntity, reader.getReaderCacheHelper().getKey()); if (!registeredClosedListeners.containsKey(cleanupKey)) { Boolean previous = registeredClosedListeners.putIfAbsent(cleanupKey, Boolean.TRUE); if (previous == null) { @@ -151,7 +153,8 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo * @param cacheKey the cache key to invalidate */ void invalidate(CacheEntity cacheEntity, DirectoryReader reader, BytesReference cacheKey) { - cache.invalidate(new Key(cacheEntity, reader.getVersion(), cacheKey)); + assert reader.getReaderCacheHelper() != null; + cache.invalidate(new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey)); } private static class Loader implements CacheLoader { @@ -220,12 +223,12 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class); public final CacheEntity entity; // use as identity equality - public final long readerVersion; // use the reader version to now keep a reference to a "short" lived reader until its reaped + public final IndexReader.CacheKey readerCacheKey; public final BytesReference value; - Key(CacheEntity entity, long readerVersion, BytesReference value) { + Key(CacheEntity entity, IndexReader.CacheKey readerCacheKey, BytesReference value) { this.entity = entity; - this.readerVersion = readerVersion; + this.readerCacheKey = Objects.requireNonNull(readerCacheKey); this.value = value; } @@ -245,7 +248,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Key key = (Key) o; - if (readerVersion != key.readerVersion) return false; + if (Objects.equals(readerCacheKey, key.readerCacheKey) == false) return false; if (!entity.getCacheIdentity().equals(key.entity.getCacheIdentity())) return false; if (!value.equals(key.value)) return false; return true; @@ -254,7 +257,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo @Override public int hashCode() { int result = entity.getCacheIdentity().hashCode(); - result = 31 * result + 
Long.hashCode(readerVersion); + result = 31 * result + readerCacheKey.hashCode(); result = 31 * result + value.hashCode(); return result; } @@ -262,11 +265,11 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo private class CleanupKey implements IndexReader.ClosedListener { final CacheEntity entity; - final long readerVersion; // use the reader version to now keep a reference to a "short" lived reader until its reaped + final IndexReader.CacheKey readerCacheKey; - private CleanupKey(CacheEntity entity, long readerVersion) { + private CleanupKey(CacheEntity entity, IndexReader.CacheKey readerCacheKey) { this.entity = entity; - this.readerVersion = readerVersion; + this.readerCacheKey = readerCacheKey; } @Override @@ -284,7 +287,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo return false; } CleanupKey that = (CleanupKey) o; - if (readerVersion != that.readerVersion) return false; + if (Objects.equals(readerCacheKey, that.readerCacheKey) == false) return false; if (!entity.getCacheIdentity().equals(that.entity.getCacheIdentity())) return false; return true; } @@ -292,7 +295,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo @Override public int hashCode() { int result = entity.getCacheIdentity().hashCode(); - result = 31 * result + Long.hashCode(readerVersion); + result = 31 * result + Objects.hashCode(readerCacheKey); return result; } } @@ -307,7 +310,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo for (Iterator iterator = keysToClean.iterator(); iterator.hasNext(); ) { CleanupKey cleanupKey = iterator.next(); iterator.remove(); - if (cleanupKey.readerVersion == -1 || cleanupKey.entity.isOpen() == false) { + if (cleanupKey.readerCacheKey == null || cleanupKey.entity.isOpen() == false) { // a null reader cache key indicates full cleanup, as does a closed shard currentFullClean.add(cleanupKey.entity.getCacheIdentity()); } else { @@ -320,7 +323,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo if (currentFullClean.contains(key.entity.getCacheIdentity())) { iterator.remove(); } else { - if (currentKeysToClean.contains(new CleanupKey(key.entity, key.readerVersion))) { + if (currentKeysToClean.contains(new CleanupKey(key.entity, key.readerCacheKey))) { iterator.remove(); } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 05dcf0b2d01..83d358424de 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -1166,10 +1166,9 @@ public class IndicesService extends AbstractLifecycleComponent } else if (request.requestCache() == false) { return false; } - // if the reader is not a directory reader, we can't get the version from it - if ((context.searcher().getIndexReader() instanceof DirectoryReader) == false) { - return false; - } + // We use the cacheKey of the index reader as part of the key of the IndicesRequestCache. 
+ assert context.searcher().getIndexReader().getReaderCacheHelper() != null; + // if now in millis is used (or in the future, a more generic "isDeterministic" flag) + // then we can't cache based on "now" key within the search request, as it is not deterministic if (context.getQueryShardContext().isCachable() == false) { diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java index f9423f1b13c..9e0aa24a105 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java @@ -20,10 +20,10 @@ package org.elasticsearch.monitor.os; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.Constants; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.monitor.Probes; import java.io.IOException; @@ -517,7 +517,7 @@ public class OsProbe { } - private final Logger logger = ESLoggerFactory.getLogger(getClass()); + private final Logger logger = LogManager.getLogger(getClass()); public OsInfo osInfo(long refreshInterval, int allocatedProcessors) { return new OsInfo(refreshInterval, Runtime.getRuntime().availableProcessors(), diff --git a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index 54d9ade581e..c0d94c3f000 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -103,6 +103,22 @@ public interface ActionPlugin { /** * Returns a function used to wrap each rest request before handling the request. + * The returned {@link UnaryOperator} is called for every incoming rest request and receives + * the original rest handler as its input. This allows adding arbitrary functionality around + * rest request handlers to do, for instance, logging or authentication. + * A simple example of how to only allow GET requests is here: + *
+     * <pre>
+     * {@code
+     *    UnaryOperator<RestHandler> getRestHandlerWrapper(ThreadContext threadContext) {
+     *      return originalHandler -> (RestHandler) (request, channel, client) -> {
+     *        if (request.method() != Method.GET) {
+     *          throw new IllegalStateException("only GET requests are allowed");
+     *        }
+     *        originalHandler.handleRequest(request, channel, client);
+     *      };
+     *    }
+     * }
+     * </pre>
* * Note: Only one installed plugin may implement a rest wrapper. */ diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index b60288900ab..6805a0e40d5 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -20,6 +20,7 @@ package org.elasticsearch.plugins; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.util.CharFilterFactory; import org.apache.lucene.analysis.util.TokenFilterFactory; import org.apache.lucene.analysis.util.TokenizerFactory; @@ -36,7 +37,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -475,7 +475,7 @@ public class PluginsService extends AbstractComponent { List exts = bundle.plugin.getExtendedPlugins(); try { - final Logger logger = ESLoggerFactory.getLogger(JarHell.class); + final Logger logger = LogManager.getLogger(JarHell.class); Set urls = new HashSet<>(); for (String extendedPlugin : exts) { Set pluginUrls = transitiveUrls.get(extendedPlugin); diff --git a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java index 3abb0f6a304..f6659e8041e 100644 --- a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java +++ b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java @@ -19,9 +19,9 @@ package org.elasticsearch.script; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -48,7 +48,7 @@ import java.util.Locale; */ public class JodaCompatibleZonedDateTime { private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(ESLoggerFactory.getLogger(JodaCompatibleZonedDateTime.class)); + new DeprecationLogger(LogManager.getLogger(JodaCompatibleZonedDateTime.class)); private static void logDeprecated(String key, String message, Object... 
params) { // NOTE: we don't check SpecialPermission because this will be called (indirectly) from scripts diff --git a/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java b/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java index 085f40e0d7a..122e3defe75 100644 --- a/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java +++ b/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java @@ -61,15 +61,21 @@ public abstract class TermsSetQueryScript { private final LeafSearchLookup leafLookup; public TermsSetQueryScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { - this.params = new ParameterMap(params, DEPRECATIONS); + Map parameters = new HashMap<>(params); this.leafLookup = lookup.getLeafSearchLookup(leafContext); + parameters.putAll(leafLookup.asMap()); + this.params = new ParameterMap(parameters, DEPRECATIONS); + } + + protected TermsSetQueryScript() { + params = null; + leafLookup = null; } /** * Return the parameters for this script. */ public Map getParams() { - this.params.putAll(leafLookup.asMap()); return params; } diff --git a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java index eaaa5f74fa4..249a110b01d 100644 --- a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -48,7 +48,6 @@ import java.util.Locale; * Defines what values to pick in the case a document contains multiple values for a particular field. */ public enum MultiValueMode implements Writeable { - /** * Pick the sum of all the values. */ @@ -64,16 +63,21 @@ public enum MultiValueMode implements Writeable { } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int totalCount = 0; long totalValue = 0; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int index = 0; index < count; ++index) { + if (++count > maxChildren) { + break; + } + + final int docCount = values.docValueCount(); + for (int index = 0; index < docCount; ++index) { totalValue += values.nextValue(); } - totalCount += count; + totalCount += docCount; } } return totalCount > 0 ? totalValue : missingValue; @@ -90,18 +94,23 @@ public enum MultiValueMode implements Writeable { } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int totalCount = 0; double totalValue = 0; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int index = 0; index < count; ++index) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int index = 0; index < docCount; ++index) { totalValue += values.nextValue(); } - totalCount += count; + totalCount += docCount; } } + return totalCount > 0 ? 
totalValue : missingValue; } }, @@ -117,26 +126,30 @@ public enum MultiValueMode implements Writeable { for (int index = 0; index < count; ++index) { total += values.nextValue(); } - return count > 1 ? Math.round((double)total/(double)count) : total; + return count > 1 ? Math.round((double) total / (double) count) : total; } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int totalCount = 0; long totalValue = 0; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int index = 0; index < count; ++index) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int index = 0; index < docCount; ++index) { totalValue += values.nextValue(); } - totalCount += count; + totalCount += docCount; } } if (totalCount < 1) { return missingValue; } - return totalCount > 1 ? Math.round((double)totalValue/(double)totalCount) : totalValue; + return totalCount > 1 ? Math.round((double) totalValue / (double) totalCount) : totalValue; } @Override @@ -146,26 +159,30 @@ public enum MultiValueMode implements Writeable { for (int index = 0; index < count; ++index) { total += values.nextValue(); } - return total/count; + return total / count; } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int totalCount = 0; double totalValue = 0; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int index = 0; index < count; ++index) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int index = 0; index < docCount; ++index) { totalValue += values.nextValue(); } - totalCount += count; + totalCount += docCount; } } if (totalCount < 1) { return missingValue; } - return totalValue/totalCount; + return totalValue / totalCount; } }, @@ -210,11 +227,15 @@ public enum MultiValueMode implements Writeable { } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { boolean hasValue = false; long minValue = Long.MAX_VALUE; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } minValue = Math.min(minValue, values.nextValue()); hasValue = true; } @@ -228,11 +249,15 @@ public enum MultiValueMode implements Writeable { } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws 
IOException { boolean hasValue = false; double minValue = Double.POSITIVE_INFINITY; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } minValue = Math.min(minValue, values.nextValue()); hasValue = true; } @@ -246,23 +271,27 @@ public enum MultiValueMode implements Writeable { } @Override - protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { - BytesRefBuilder value = null; + protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { + BytesRefBuilder bytesRefBuilder = null; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } final BytesRef innerValue = values.binaryValue(); - if (value == null) { + if (bytesRefBuilder == null) { builder.copyBytes(innerValue); - value = builder; + bytesRefBuilder = builder; } else { - final BytesRef min = value.get().compareTo(innerValue) <= 0 ? value.get() : innerValue; + final BytesRef min = bytesRefBuilder.get().compareTo(innerValue) <= 0 ? bytesRefBuilder.get() : innerValue; if (min == innerValue) { - value.copyBytes(min); + bytesRefBuilder.copyBytes(min); } } } } - return value == null ? null : value.get(); + return bytesRefBuilder == null ? null : bytesRefBuilder.get(); } @Override @@ -271,16 +300,21 @@ public enum MultiValueMode implements Writeable { } @Override - protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int ord = Integer.MAX_VALUE; boolean hasValue = false; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } final int innerOrd = values.ordValue(); ord = Math.min(ord, innerOrd); hasValue = true; } } + return hasValue ? 
ord : -1; } }, @@ -299,13 +333,17 @@ public enum MultiValueMode implements Writeable { } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { boolean hasValue = false; long maxValue = Long.MIN_VALUE; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int i = 0; i < count - 1; ++i) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int i = 0; i < docCount - 1; ++i) { values.nextValue(); } maxValue = Math.max(maxValue, values.nextValue()); @@ -325,13 +363,17 @@ public enum MultiValueMode implements Writeable { } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { boolean hasValue = false; double maxValue = Double.NEGATIVE_INFINITY; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int i = 0; i < count - 1; ++i) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int i = 0; i < docCount - 1; ++i) { values.nextValue(); } maxValue = Math.max(maxValue, values.nextValue()); @@ -351,23 +393,27 @@ public enum MultiValueMode implements Writeable { } @Override - protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { - BytesRefBuilder value = null; + protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { + BytesRefBuilder bytesRefBuilder = null; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } final BytesRef innerValue = values.binaryValue(); - if (value == null) { + if (bytesRefBuilder == null) { builder.copyBytes(innerValue); - value = builder; + bytesRefBuilder = builder; } else { - final BytesRef max = value.get().compareTo(innerValue) > 0 ? value.get() : innerValue; + final BytesRef max = bytesRefBuilder.get().compareTo(innerValue) > 0 ? bytesRefBuilder.get() : innerValue; if (max == innerValue) { - value.copyBytes(max); + bytesRefBuilder.copyBytes(max); } } } } - return value == null ? null : value.get(); + return bytesRefBuilder == null ? 
null : bytesRefBuilder.get(); } @Override @@ -380,10 +426,14 @@ public enum MultiValueMode implements Writeable { } @Override - protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int ord = -1; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } ord = Math.max(ord, values.ordValue()); } } @@ -458,7 +508,7 @@ public enum MultiValueMode implements Writeable { * NOTE: Calling the returned instance on docs that are not root docs is illegal * The returned instance can only be evaluate the current and upcoming docs */ - public NumericDocValues select(final SortedNumericDocValues values, final long missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc) throws IOException { + public NumericDocValues select(final SortedNumericDocValues values, final long missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException { if (parentDocs == null || childDocs == null) { return FieldData.replaceMissing(DocValues.emptyNumeric(), missingValue); } @@ -486,7 +536,7 @@ public enum MultiValueMode implements Writeable { } lastSeenParentDoc = parentDoc; - lastEmittedValue = pick(values, missingValue, childDocs, firstChildDoc, parentDoc); + lastEmittedValue = pick(values, missingValue, childDocs, firstChildDoc, parentDoc, maxChildren); return true; } @@ -502,7 +552,7 @@ public enum MultiValueMode implements Writeable { }; } - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -555,7 +605,7 @@ public enum MultiValueMode implements Writeable { * NOTE: Calling the returned instance on docs that are not root docs is illegal * The returned instance can only be evaluate the current and upcoming docs */ - public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc) throws IOException { + public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException { if (parentDocs == null || childDocs == null) { return FieldData.replaceMissing(FieldData.emptyNumericDouble(), missingValue); } @@ -580,7 +630,7 @@ public enum MultiValueMode implements Writeable { } lastSeenParentDoc = parentDoc; - lastEmittedValue = pick(values, missingValue, childDocs, firstChildDoc, parentDoc); + lastEmittedValue = pick(values, missingValue, childDocs, firstChildDoc, parentDoc, maxChildren); return true; } @@ -591,7 +641,7 @@ public enum MultiValueMode implements Writeable { }; } - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { throw 
new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -663,7 +713,7 @@ public enum MultiValueMode implements Writeable { * NOTE: Calling the returned instance on docs that are not root docs is illegal * The returned instance can only be evaluate the current and upcoming docs */ - public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc) throws IOException { + public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException { if (parentDocs == null || childDocs == null) { return select(FieldData.emptySortedBinary(), missingValue); } @@ -692,7 +742,7 @@ public enum MultiValueMode implements Writeable { } lastSeenParentDoc = parentDoc; - lastEmittedValue = pick(selectedValues, builder, childDocs, firstChildDoc, parentDoc); + lastEmittedValue = pick(selectedValues, builder, childDocs, firstChildDoc, parentDoc, maxChildren); if (lastEmittedValue == null) { lastEmittedValue = missingValue; } @@ -706,7 +756,7 @@ public enum MultiValueMode implements Writeable { }; } - protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -718,7 +768,7 @@ public enum MultiValueMode implements Writeable { */ public SortedDocValues select(final SortedSetDocValues values) { if (values.getValueCount() >= Integer.MAX_VALUE) { - throw new UnsupportedOperationException("fields containing more than " + (Integer.MAX_VALUE-1) + " unique terms are unsupported"); + throw new UnsupportedOperationException("fields containing more than " + (Integer.MAX_VALUE - 1) + " unique terms are unsupported"); } final SortedDocValues singleton = DocValues.unwrapSingleton(values); @@ -779,7 +829,7 @@ public enum MultiValueMode implements Writeable { * NOTE: Calling the returned instance on docs that are not root docs is illegal * The returned instance can only be evaluate the current and upcoming docs */ - public SortedDocValues select(final SortedSetDocValues values, final BitSet parentDocs, final DocIdSetIterator childDocs) throws IOException { + public SortedDocValues select(final SortedSetDocValues values, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxChildren) throws IOException { if (parentDocs == null || childDocs == null) { return select(DocValues.emptySortedSet()); } @@ -817,7 +867,7 @@ public enum MultiValueMode implements Writeable { } docID = lastSeenParentDoc = parentDoc; - lastEmittedOrd = pick(selectedValues, childDocs, firstChildDoc, parentDoc); + lastEmittedOrd = pick(selectedValues, childDocs, firstChildDoc, parentDoc, maxChildren); return lastEmittedOrd != -1; } @@ -833,7 +883,7 @@ public enum MultiValueMode implements Writeable { }; } - protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java index c8b1e630b85..c7d500e81ca 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java @@ -50,7 +50,7 @@ public class FilterAggregatorFactory extends AggregatorFactory pointConverter; + DoubleArray maxes; - MaxAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, - SearchContext context, - Aggregator parent, List pipelineAggregators, - Map metaData) throws IOException { + MaxAggregator(String name, + ValuesSourceConfig config, + ValuesSource.Numeric valuesSource, + SearchContext context, + Aggregator parent, List pipelineAggregators, + Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); this.valuesSource = valuesSource; - this.formatter = formatter; if (valuesSource != null) { maxes = context.bigArrays().newDoubleArray(1, false); maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY); } + this.formatter = config.format(); + this.pointConverter = getPointReaderOrNull(context, parent, config); + if (pointConverter != null) { + pointField = config.fieldContext().field(); + } else { + pointField = null; + } } @Override @@ -68,8 +88,28 @@ class MaxAggregator extends NumericMetricsAggregator.SingleValue { public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { - return LeafBucketCollector.NO_OP_COLLECTOR; - } + if (parent != null) { + return LeafBucketCollector.NO_OP_COLLECTOR; + } else { + // we have no parent and the values source is empty so we can skip collecting hits. + throw new CollectionTerminatedException(); + } + } + if (pointConverter != null) { + Number segMax = findLeafMaxValue(ctx.reader(), pointField, pointConverter); + if (segMax != null) { + /** + * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull} + * so the ordinal for the bucket is always 0. + */ + assert maxes.size() == 1; + double max = maxes.get(0); + max = Math.max(max, segMax.doubleValue()); + maxes.set(0, max); + // the maximum value has been extracted, we don't need to collect hits on this segment. + throw new CollectionTerminatedException(); + } + } final BigArrays bigArrays = context.bigArrays(); final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx); final NumericDoubleValues values = MultiValueMode.MAX.select(allValues); @@ -118,4 +158,48 @@ class MaxAggregator extends NumericMetricsAggregator.SingleValue { public void doClose() { Releasables.close(maxes); } + + /** + * Returns the maximum value indexed in the fieldName field or null + * if the value cannot be inferred from the indexed {@link PointValues}. 
+ */ + static Number findLeafMaxValue(LeafReader reader, String fieldName, Function converter) throws IOException { + final PointValues pointValues = reader.getPointValues(fieldName); + if (pointValues == null) { + return null; + } + final Bits liveDocs = reader.getLiveDocs(); + if (liveDocs == null) { + return converter.apply(pointValues.getMaxPackedValue()); + } + int numBytes = pointValues.getBytesPerDimension(); + final byte[] maxValue = pointValues.getMaxPackedValue(); + final Number[] result = new Number[1]; + pointValues.intersect(new PointValues.IntersectVisitor() { + @Override + public void visit(int docID) { + throw new UnsupportedOperationException(); + } + + @Override + public void visit(int docID, byte[] packedValue) { + if (liveDocs.get(docID)) { + // we need to collect all values in this leaf (the sort is ascending) where + // the last live doc is guaranteed to contain the max value for the segment. + result[0] = converter.apply(packedValue); + } + } + + @Override + public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + if (FutureArrays.equals(maxValue, 0, numBytes, maxPackedValue, 0, numBytes)) { + // we only check leaves that contain the max value for the segment. + return PointValues.Relation.CELL_CROSSES_QUERY; + } else { + return PointValues.Relation.CELL_OUTSIDE_QUERY; + } + } + }); + return result[0]; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java index 314e1106b37..d64987d9cde 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java @@ -43,13 +43,13 @@ class MaxAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) throws IOException { - return new MaxAggregator(name, null, config.format(), context, parent, pipelineAggregators, metaData); + return new MaxAggregator(name, config, null, context, parent, pipelineAggregators, metaData); } @Override protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException { - return new MaxAggregator(name, valuesSource, config.format(), context, parent, pipelineAggregators, metaData); + return new MaxAggregator(name, config, valuesSource, context, parent, pipelineAggregators, metaData); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java index ea8e160e138..df24ee7387f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java @@ -18,13 +18,23 @@ */ package org.elasticsearch.search.aggregations.metrics; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.search.CollectionTerminatedException; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.util.Bits; import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import 
org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.aggregations.Aggregator; @@ -33,29 +43,44 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.function.Function; class MinAggregator extends NumericMetricsAggregator.SingleValue { final ValuesSource.Numeric valuesSource; final DocValueFormat format; + final String pointField; + final Function pointConverter; + DoubleArray mins; - MinAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, - SearchContext context, Aggregator parent, List pipelineAggregators, - Map metaData) throws IOException { + MinAggregator(String name, + ValuesSourceConfig config, + ValuesSource.Numeric valuesSource, + SearchContext context, + Aggregator parent, + List pipelineAggregators, + Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); this.valuesSource = valuesSource; if (valuesSource != null) { mins = context.bigArrays().newDoubleArray(1, false); mins.fill(0, mins.size(), Double.POSITIVE_INFINITY); } - this.format = formatter; + this.format = config.format(); + this.pointConverter = getPointReaderOrNull(context, parent, config); + if (pointConverter != null) { + pointField = config.fieldContext().field(); + } else { + pointField = null; + } } @Override @@ -67,7 +92,26 @@ class MinAggregator extends NumericMetricsAggregator.SingleValue { public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { - return LeafBucketCollector.NO_OP_COLLECTOR; + if (parent != null) { + return LeafBucketCollector.NO_OP_COLLECTOR; + } else { + // we have no parent and the values source is empty so we can skip collecting hits. + throw new CollectionTerminatedException(); + } + } + if (pointConverter != null) { + Number segMin = findLeafMinValue(ctx.reader(), pointField, pointConverter); + if (segMin != null) { + /** + * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull} + * so the ordinal for the bucket is always 0. + */ + double min = mins.get(0); + min = Math.min(min, segMin.doubleValue()); + mins.set(0, min); + // the minimum value has been extracted, we don't need to collect hits on this segment. 
+ throw new CollectionTerminatedException(); + } } final BigArrays bigArrays = context.bigArrays(); final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx); @@ -117,4 +161,77 @@ class MinAggregator extends NumericMetricsAggregator.SingleValue { public void doClose() { Releasables.close(mins); } + + + /** + * Returns a converter for point values if early termination is applicable to + * the context or null otherwise. + * + * @param context The {@link SearchContext} of the aggregation. + * @param parent The parent aggregator. + * @param config The config for the values source metric. + */ + static Function getPointReaderOrNull(SearchContext context, Aggregator parent, + ValuesSourceConfig config) { + if (context.query() != null && + context.query().getClass() != MatchAllDocsQuery.class) { + return null; + } + if (parent != null) { + return null; + } + if (config.fieldContext() != null && config.script() == null) { + MappedFieldType fieldType = config.fieldContext().fieldType(); + if (fieldType == null || fieldType.indexOptions() == IndexOptions.NONE) { + return null; + } + Function converter = null; + if (fieldType instanceof NumberFieldMapper.NumberFieldType) { + converter = ((NumberFieldMapper.NumberFieldType) fieldType)::parsePoint; + } else if (fieldType.getClass() == DateFieldMapper.DateFieldType.class) { + converter = (in) -> LongPoint.decodeDimension(in, 0); + } + return converter; + } + return null; + } + + /** + * Returns the minimum value indexed in the fieldName field or null + * if the value cannot be inferred from the indexed {@link PointValues}. + */ + static Number findLeafMinValue(LeafReader reader, String fieldName, Function converter) throws IOException { + final PointValues pointValues = reader.getPointValues(fieldName); + if (pointValues == null) { + return null; + } + final Bits liveDocs = reader.getLiveDocs(); + if (liveDocs == null) { + return converter.apply(pointValues.getMinPackedValue()); + } + final Number[] result = new Number[1]; + try { + pointValues.intersect(new PointValues.IntersectVisitor() { + @Override + public void visit(int docID) { + throw new UnsupportedOperationException(); + } + + @Override + public void visit(int docID, byte[] packedValue) { + if (liveDocs.get(docID)) { + result[0] = converter.apply(packedValue); + // this is the first leaf with a live doc so the value is the minimum for this segment. 
+ throw new CollectionTerminatedException(); + } + } + + @Override + public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return PointValues.Relation.CELL_CROSSES_QUERY; + } + }); + } catch (CollectionTerminatedException e) {} + return result[0]; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java index d08b8199a33..240cf2ba715 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java @@ -43,12 +43,12 @@ class MinAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) throws IOException { - return new MinAggregator(name, null, config.format(), context, parent, pipelineAggregators, metaData); + return new MinAggregator(name, config, null, context, parent, pipelineAggregators, metaData); } @Override protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException { - return new MinAggregator(name, valuesSource, config.format(), context, parent, pipelineAggregators, metaData); + return new MinAggregator(name, config, valuesSource, context, parent, pipelineAggregators, metaData); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java index 28d82f4cafd..6faf6a5d58c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java @@ -45,6 +45,10 @@ public abstract class ValuesSourceAggregatorFactory getConfig() { + return config; + } + @Override public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java index 0b3204e193b..583516c5cd4 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java @@ -53,7 +53,7 @@ public final class FragmentBuilderHelper { if (!fragInfo.getSubInfos().isEmpty() && containsBrokenAnalysis(fieldType.indexAnalyzer())) { /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time * which can potentially mess up the offsets. To prevent a SAIIOBException we need to resort - * the fragments based on their offsets rather than using soley the positions as it is done in + * the fragments based on their offsets rather than using solely the positions as it is done in * the FastVectorHighlighter. Yet, this is really a lucene problem and should be fixed in lucene rather * than in this hack... aka. "we are working on it!" 
*/ final List subInfos = fragInfo.getSubInfos(); diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 6a64b1c0cc9..19a62d74443 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -332,6 +332,14 @@ public class FieldSortBuilder extends SortBuilder { final Nested nested; if (nestedSort != null) { + if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on v6.5.0 or higher"); + } + if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on last level of nested sort"); + } // new nested sorts takes priority nested = resolveNested(context, nestedSort); } else { diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 6adad6dabf0..2c8c4e234db 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -54,6 +54,7 @@ import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; @@ -633,6 +634,14 @@ public class GeoDistanceSortBuilder extends SortBuilder final Nested nested; if (nestedSort != null) { + if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on v6.5.0 or higher"); + } + if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on last level of nested sort"); + } // new nested sorts takes priority nested = resolveNested(context, nestedSort); } else { @@ -672,8 +681,10 @@ public class GeoDistanceSortBuilder extends SortBuilder } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); + final int maxChildren = nested.getNestedSort() != null ? 
+ nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; selectedValues = finalSortMode.select(distanceValues, Double.POSITIVE_INFINITY, rootDocs, innerDocs, - context.reader().maxDoc()); + context.reader().maxDoc(), maxChildren); } return selectedValues.getRawDoubleValues(); } diff --git a/server/src/main/java/org/elasticsearch/search/sort/NestedSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/NestedSortBuilder.java index a6ad0284034..eb39a8e0bc3 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/NestedSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/NestedSortBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -38,9 +39,11 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { public static final ParseField NESTED_FIELD = new ParseField("nested"); public static final ParseField PATH_FIELD = new ParseField("path"); public static final ParseField FILTER_FIELD = new ParseField("filter"); + public static final ParseField MAX_CHILDREN_FIELD = new ParseField("max_children"); private final String path; private QueryBuilder filter; + private int maxChildren = Integer.MAX_VALUE; private NestedSortBuilder nestedSort; public NestedSortBuilder(String path) { @@ -51,6 +54,11 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { path = in.readOptionalString(); filter = in.readOptionalNamedWriteable(QueryBuilder.class); nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { + maxChildren = in.readVInt(); + } else { + maxChildren = Integer.MAX_VALUE; + } } public String getPath() { @@ -61,11 +69,18 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { return filter; } + public int getMaxChildren() { return maxChildren; } + public NestedSortBuilder setFilter(final QueryBuilder filter) { this.filter = filter; return this; } + public NestedSortBuilder setMaxChildren(final int maxChildren) { + this.maxChildren = maxChildren; + return this; + } + public NestedSortBuilder getNestedSort() { return nestedSort; } @@ -83,6 +98,9 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { out.writeOptionalString(path); out.writeOptionalNamedWriteable(filter); out.writeOptionalWriteable(nestedSort); + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { + out.writeVInt(maxChildren); + } } @Override @@ -94,6 +112,11 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { if (filter != null) { builder.field(FILTER_FIELD.getPreferredName(), filter); } + + if (maxChildren != Integer.MAX_VALUE) { + builder.field(MAX_CHILDREN_FIELD.getPreferredName(), maxChildren); + } + if (nestedSort != null) { builder.field(NESTED_FIELD.getPreferredName(), nestedSort); } @@ -104,6 +127,7 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { public static NestedSortBuilder fromXContent(XContentParser parser) throws IOException { String path = null; QueryBuilder filter = null; + int maxChildren = Integer.MAX_VALUE; NestedSortBuilder nestedSort = null; XContentParser.Token token = parser.currentToken(); @@ -116,6 +140,8 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { path = parser.text(); } else if (currentName.equals(FILTER_FIELD.getPreferredName())) { filter = 
parseNestedFilter(parser); + } else if (currentName.equals(MAX_CHILDREN_FIELD.getPreferredName())) { + maxChildren = parser.intValue(); } else if (currentName.equals(NESTED_FIELD.getPreferredName())) { nestedSort = NestedSortBuilder.fromXContent(parser); } else { @@ -129,7 +155,7 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { throw new IllegalArgumentException("malformed nested sort format, must start with an object"); } - return new NestedSortBuilder(path).setFilter(filter).setNestedSort(nestedSort); + return new NestedSortBuilder(path).setFilter(filter).setMaxChildren(maxChildren).setNestedSort(nestedSort); } @Override @@ -143,12 +169,13 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { NestedSortBuilder that = (NestedSortBuilder) obj; return Objects.equals(path, that.path) && Objects.equals(filter, that.filter) + && Objects.equals(maxChildren, that.maxChildren) && Objects.equals(nestedSort, that.nestedSort); } @Override public int hashCode() { - return Objects.hash(path, filter, nestedSort); + return Objects.hash(path, filter, nestedSort, maxChildren); } public NestedSortBuilder rewrite(QueryRewriteContext ctx) throws IOException { @@ -164,7 +191,7 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { rewriteNested = nestedSort.rewrite(ctx); } if (rewriteFilter != this.filter || rewriteNested != this.nestedSort) { - return new NestedSortBuilder(this.path).setFilter(rewriteFilter).setNestedSort(rewriteNested); + return new NestedSortBuilder(this.path).setFilter(rewriteFilter).setMaxChildren(this.maxChildren).setNestedSort(rewriteNested); } else { return this; } diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 1b71c51d416..e425755a55e 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -319,6 +319,14 @@ public class ScriptSortBuilder extends SortBuilder { final Nested nested; if (nestedSort != null) { + if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on v6.5.0 or higher"); + } + if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on last level of nested sort"); + } // new nested sorts takes priority nested = resolveNested(context, nestedSort); } else { diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 9d2a5c9f1e2..a7861dee9bb 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -195,7 +195,7 @@ public abstract class SortBuilder> implements NamedWrit } else { parentQuery = objectMapper.nestedTypeFilter(); } - return new Nested(context.bitsetFilter(parentQuery), childQuery); + return new Nested(context.bitsetFilter(parentQuery), childQuery, nestedSort); } private static Query resolveNestedQuery(QueryShardContext context, NestedSortBuilder nestedSort, Query parentQuery) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index 
f639846b418..1f89a7d88b9 100644
--- a/server/src/main/java/org/elasticsearch/tasks/Task.java
+++ b/server/src/main/java/org/elasticsearch/tasks/Task.java
@@ -157,9 +157,20 @@ public class Task {
         return null;
     }
 
+    /**
+     * Report of the internal status of a task. These can vary wildly from task
+     * to task because each task is implemented differently but we should try
+     * to keep each task consistent from version to version where possible.
+     * That means each implementation of {@linkplain Task.Status#toXContent}
+     * should avoid making backwards incompatible changes to the rendered
+     * result. But if we change the way a request is implemented it might not
+     * be possible to preserve backwards compatibility. In that case, we
+     * can change this on version upgrade but we should be careful
+     * because some statuses (reindex) have become de facto standardized because
+     * they are used by systems like Kibana.
+     */
     public interface Status extends ToXContentObject, NamedWriteable {}
-
     /**
      * Returns stored task header associated with the task
     */
diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskId.java b/server/src/main/java/org/elasticsearch/tasks/TaskId.java
index 1aeceef247f..f92997b047c 100644
--- a/server/src/main/java/org/elasticsearch/tasks/TaskId.java
+++ b/server/src/main/java/org/elasticsearch/tasks/TaskId.java
@@ -19,10 +19,13 @@
 package org.elasticsearch.tasks;
 
+import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ContextParser;
+import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
 
@@ -96,6 +99,15 @@ public final class TaskId implements Writeable {
         out.writeLong(id);
     }
 
+    public static ContextParser<Void, TaskId> parser() {
+        return (p, c) -> {
+            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+                return new TaskId(p.text());
+            }
+            throw new ElasticsearchParseException("Expected a string but found [{}] instead", p.currentToken());
+        };
+    }
+
     public String getNodeId() {
         return nodeId;
     }
diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResult.java b/server/src/main/java/org/elasticsearch/tasks/TaskResult.java
index a866ad9bb2d..46b68ce1602 100644
--- a/server/src/main/java/org/elasticsearch/tasks/TaskResult.java
+++ b/server/src/main/java/org/elasticsearch/tasks/TaskResult.java
@@ -76,7 +76,7 @@ public final class TaskResult implements Writeable, ToXContentObject {
      * Construct a {@linkplain TaskResult} for a task that completed successfully.
      */
     public TaskResult(TaskInfo task, ToXContent response) throws IOException {
-        this(true, task, null, toXContent(response));
+        this(true, task, null, XContentHelper.toXContent(response, Requests.INDEX_CONTENT_TYPE, true));
     }
 
     private TaskResult(boolean completed, TaskInfo task, @Nullable BytesReference error, @Nullable BytesReference result) {
@@ -222,16 +222,6 @@ public final class TaskResult implements Writeable, ToXContentObject {
         return Objects.hash(completed, task, getErrorAsMap(), getResponseAsMap());
     }
 
-    private static BytesReference toXContent(ToXContent result) throws IOException {
-        try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) {
-            // Elasticsearch's Response object never emit starting or ending objects. Most other implementers of ToXContent do....
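For context on the TaskResult change above: the private helper being removed here wrapped a ToXContent fragment in a JSON object by hand, which XContentHelper.toXContent already does for any ToXContent that reports itself as a fragment (as Elasticsearch response objects do). A minimal sketch of the two renderings, assuming an arbitrary fragment passed in as `response`; the class and method names are illustrative, not taken from the patch:

```java
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;

import java.io.IOException;

class TaskResultRenderingSketch {
    // Pre-change behaviour: wrap the fragment in an object by hand.
    static BytesReference renderManually(ToXContent response) throws IOException {
        try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) {
            builder.startObject();
            response.toXContent(builder, ToXContent.EMPTY_PARAMS);
            builder.endObject();
            return BytesReference.bytes(builder);
        }
    }

    // Post-change behaviour: XContentHelper wraps fragments itself; the trailing
    // "true" additionally switches on human-readable value rendering.
    static BytesReference renderViaHelper(ToXContent response) throws IOException {
        return XContentHelper.toXContent(response, Requests.INDEX_CONTENT_TYPE, true);
    }
}
```

Apart from the human-readable flag, both paths should yield the same wrapped object, which is why the private helper can be deleted.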
- builder.startObject(); - result.toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); - return BytesReference.bytes(builder); - } - } - private static BytesReference toXContent(Exception error) throws IOException { try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/transport/Transport.java b/server/src/main/java/org/elasticsearch/transport/Transport.java index 90adf2ab9e7..fc1f0c9e5ec 100644 --- a/server/src/main/java/org/elasticsearch/transport/Transport.java +++ b/server/src/main/java/org/elasticsearch/transport/Transport.java @@ -150,7 +150,7 @@ public interface Transport extends LifecycleComponent { } /** - * This class represents a response context that encapsulates the actual response handler, the action and the conneciton it was + * This class represents a response context that encapsulates the actual response handler, the action and the connection it was * executed on. */ final class ResponseContext { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index f1842b5b0dd..1b2c238098e 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -149,7 +149,7 @@ public class BulkRequestTests extends ESTestCase { IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); assertThat(exc.getMessage(), - containsString("Malformed action/metadata line [5], expected a simple value for field [_unkown] but found [START_ARRAY]")); + containsString("Malformed action/metadata line [5], expected a simple value for field [_unknown] but found [START_ARRAY]")); } public void testSimpleBulk8() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 6c084cb29cd..ff868c3250a 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -1020,7 +1020,7 @@ public class TransportReplicationActionTests extends ESTestCase { // publish a new state (same as the old state with the version incremented) setState(clusterService, stateWithNodes); - // Assert that the request was retried, this time successfull + // Assert that the request was retried, this time successful assertTrue("action should have been successfully called on retry but was not", calledSuccessfully.get()); transportService.stop(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java index de3223517b9..7e74a35cf55 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java @@ -599,7 +599,7 @@ public class OperationRoutingTests extends ESTestCase{ collector.addNodeStatistics("node_1", 4, TimeValue.timeValueMillis(300).nanos(), TimeValue.timeValueMillis(250).nanos()); 
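Stepping back to the nested-sort changes earlier in this patch: the new `max_children` option caps how many matching child documents contribute to each root document's sort value. A sketch of how a caller could use it through the builder API; the path and field names ("offer", "offer.price", "offer.color") are assumptions for illustration:

```java
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.NestedSortBuilder;
import org.elasticsearch.search.sort.SortOrder;

class MaxChildrenSortSketch {
    // Sort parent documents by the lowest "offer.price" among at most ten
    // matching children. Per the shard-level checks added above, only the
    // innermost NestedSortBuilder may set maxChildren, and the index must be
    // created on 6.5.0 or later; otherwise a QueryShardException is thrown.
    static FieldSortBuilder cheapestBlueOfferSort() {
        return new FieldSortBuilder("offer.price")
            .order(SortOrder.ASC)
            .setNestedSort(new NestedSortBuilder("offer")
                .setFilter(QueryBuilders.termQuery("offer.color", "blue"))
                .setMaxChildren(10));
    }
}
```

Leaving `maxChildren` unset keeps the default of `Integer.MAX_VALUE`, which is also what `fromXContent` assumes when the field is absent, so existing requests are unaffected.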
groupIterator = opRouting.searchShards(state, indexNames, null, null, collector, outstandingRequests); shardChoice = groupIterator.get(0).nextOrNull(); - // finally, node 2 is choosen instead + // finally, node 2 is chosen instead assertThat(shardChoice.currentNodeId(), equalTo("node_2")); IOUtils.close(clusterService); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java index ba6fe5b9a5a..4d5639a05ea 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java @@ -63,7 +63,7 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase { "node2").build()); RoutingTable routingTable = state.routingTable(); - // we can initally only allocate on node2 + // we can initially only allocate on node2 assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); assertEquals(routingTable.index("idx").shard(0).shards().get(0).currentNodeId(), "node2"); routingTable = service.applyFailedShard(state, routingTable.index("idx").shard(0).shards().get(0), randomBoolean()).routingTable(); diff --git a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java index a052bf0bf42..d357198804c 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java @@ -111,8 +111,9 @@ public class DateFormattersTests extends ESTestCase { assertThat(DateFormatters.forPattern("YYYY").withZone(ZoneId.of("CET")), not(equalTo(DateFormatters.forPattern("YYYY")))); // different locale, thus not equals - assertThat(DateFormatters.forPattern("YYYY").withLocale(randomLocale(random())), - not(equalTo(DateFormatters.forPattern("YYYY")))); + DateFormatter f1 = DateFormatters.forPattern("YYYY").withLocale(Locale.CANADA); + DateFormatter f2 = f1.withLocale(Locale.FRENCH); + assertThat(f1, not(equalTo(f2))); // different pattern, thus not equals assertThat(DateFormatters.forPattern("YYYY"), not(equalTo(DateFormatters.forPattern("YY")))); diff --git a/server/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/server/src/test/java/org/elasticsearch/common/util/BigArraysTests.java index d0c051e03bc..9b6816e2ee8 100644 --- a/server/src/test/java/org/elasticsearch/common/util/BigArraysTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/BigArraysTests.java @@ -351,7 +351,7 @@ public class BigArraysTests extends ESTestCase { public void testMaxSizeExceededOnResize() throws Exception { for (String type : Arrays.asList("Byte", "Int", "Long", "Float", "Double", "Object")) { - final int maxSize = randomIntBetween(1 << 10, 1 << 22); + final int maxSize = randomIntBetween(1 << 8, 1 << 14); HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService( Settings.builder() .put(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES) @@ -360,12 +360,18 @@ public class BigArraysTests extends ESTestCase { new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); BigArrays bigArrays = new BigArrays(null, hcbs, false).withCircuitBreaking(); Method create = BigArrays.class.getMethod("new" + 
type + "Array", long.class); - final int size = scaledRandomIntBetween(10, maxSize / 8); + final int size = scaledRandomIntBetween(10, maxSize / 16); BigArray array = (BigArray) create.invoke(bigArrays, size); Method resize = BigArrays.class.getMethod("resize", array.getClass().getInterfaces()[0], long.class); - final long newSize = maxSize + 1; - InvocationTargetException e = expectThrows(InvocationTargetException.class, () -> resize.invoke(bigArrays, array, newSize)); - assertTrue(e.getCause() instanceof CircuitBreakingException); + while (true) { + long newSize = array.size() * 2; + try { + array = (BigArray) resize.invoke(bigArrays, array, newSize); + } catch (InvocationTargetException e) { + assertTrue(e.getCause() instanceof CircuitBreakingException); + break; + } + } assertEquals(array.ramBytesUsed(), hcbs.getBreaker(CircuitBreaker.REQUEST).getUsed()); array.close(); assertEquals(0, hcbs.getBreaker(CircuitBreaker.REQUEST).getUsed()); diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/ListenableFutureTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/ListenableFutureTests.java index 712656777f9..75a2e299461 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/ListenableFutureTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/ListenableFutureTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.util.concurrent; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -30,9 +31,12 @@ import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; +import static org.hamcrest.Matchers.is; + public class ListenableFutureTests extends ESTestCase { private ExecutorService executorService; + private ThreadContext threadContext = new ThreadContext(Settings.EMPTY); @After public void stopExecutorService() throws InterruptedException { @@ -46,7 +50,7 @@ public class ListenableFutureTests extends ESTestCase { AtomicInteger notifications = new AtomicInteger(0); final int numberOfListeners = scaledRandomIntBetween(1, 12); for (int i = 0; i < numberOfListeners; i++) { - future.addListener(ActionListener.wrap(notifications::incrementAndGet), EsExecutors.newDirectExecutorService()); + future.addListener(ActionListener.wrap(notifications::incrementAndGet), EsExecutors.newDirectExecutorService(), threadContext); } future.onResponse(""); @@ -63,7 +67,7 @@ public class ListenableFutureTests extends ESTestCase { future.addListener(ActionListener.wrap(s -> fail("this should never be called"), e -> { assertEquals(exception, e); notifications.incrementAndGet(); - }), EsExecutors.newDirectExecutorService()); + }), EsExecutors.newDirectExecutorService(), threadContext); } future.onFailure(exception); @@ -76,7 +80,7 @@ public class ListenableFutureTests extends ESTestCase { final int completingThread = randomIntBetween(0, numberOfThreads - 1); final ListenableFuture future = new ListenableFuture<>(); executorService = EsExecutors.newFixed("testConcurrentListenerRegistrationAndCompletion", numberOfThreads, 1000, - EsExecutors.daemonThreadFactory("listener"), new ThreadContext(Settings.EMPTY)); + EsExecutors.daemonThreadFactory("listener"), threadContext); final CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads); final CountDownLatch listenersLatch = new 
CountDownLatch(numberOfThreads - 1); final AtomicInteger numResponses = new AtomicInteger(0); @@ -85,20 +89,31 @@ public class ListenableFutureTests extends ESTestCase { for (int i = 0; i < numberOfThreads; i++) { final int threadNum = i; Thread thread = new Thread(() -> { + threadContext.putTransient("key", threadNum); try { barrier.await(); if (threadNum == completingThread) { + // we need to do more than just call onResponse as this often results in synchronous + // execution of the listeners instead of actually going async + final int waitTime = randomIntBetween(0, 50); + Thread.sleep(waitTime); + logger.info("completing the future after sleeping {}ms", waitTime); future.onResponse(""); + logger.info("future received response"); } else { + logger.info("adding listener {}", threadNum); future.addListener(ActionListener.wrap(s -> { + logger.info("listener {} received value {}", threadNum, s); assertEquals("", s); + assertThat(threadContext.getTransient("key"), is(threadNum)); numResponses.incrementAndGet(); listenersLatch.countDown(); }, e -> { - logger.error("caught unexpected exception", e); + logger.error(new ParameterizedMessage("listener {} caught unexpected exception", threadNum), e); numExceptions.incrementAndGet(); listenersLatch.countDown(); - }), executorService); + }), executorService, threadContext); + logger.info("listener {} added", threadNum); } barrier.await(); } catch (InterruptedException | BrokenBarrierException e) { diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index e404bcfbe5d..7a24ebaf048 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.env; import org.apache.lucene.index.SegmentInfos; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -186,6 +187,27 @@ public class NodeEnvironmentTests extends ESTestCase { env.close(); } + public void testAvailableIndexFoldersWithExclusions() throws Exception { + final NodeEnvironment env = newNodeEnvironment(); + final int numIndices = randomIntBetween(1, 10); + Set excludedPaths = new HashSet<>(); + Set actualPaths = new HashSet<>(); + for (int i = 0; i < numIndices; i++) { + Index index = new Index("foo" + i, "fooUUID" + i); + for (Path path : env.indexPaths(index)) { + Files.createDirectories(path.resolve(MetaDataStateFormat.STATE_DIR_NAME)); + actualPaths.add(path.getFileName().toString()); + } + if (randomBoolean()) { + excludedPaths.add(env.indexPaths(index)[0].getFileName().toString()); + } + } + + assertThat(Sets.difference(actualPaths, excludedPaths), equalTo(env.availableIndexFolders(excludedPaths::contains))); + assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty()); + env.close(); + } + public void testResolveIndexFolders() throws Exception { final NodeEnvironment env = newNodeEnvironment(); final int numIndices = randomIntBetween(1, 10); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index ee8f18aa11e..7decbe9024f 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ 
b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -161,7 +161,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { protected Nested createNested(IndexSearcher searcher, Query parentFilter, Query childFilter) throws IOException { BitsetFilterCache s = indexService.cache().bitsetFilterCache(); - return new Nested(s.getBitSetProducer(parentFilter), childFilter); + return new Nested(s.getBitSetProducer(parentFilter), childFilter, null); } public void testEmpty() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java index 9e31bd76c30..b17abcc17b3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java @@ -37,7 +37,7 @@ public class MultiFieldCopyToMapperTests extends ESTestCase { public void testExceptionForCopyToInMultiFields() throws IOException { XContentBuilder mapping = createMappinmgWithCopyToInMultiField(); - // first check that for newer versions we throw exception if copy_to is found withing multi field + // first check that for newer versions we throw exception if copy_to is found within multi field MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "test"); try { mapperService.parse("type", new CompressedXContent(Strings.toString(mapping)), true); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index 6be0894186a..01bdcd362fc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -650,8 +650,7 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { .startObject("nested1").field("type", "nested").endObject() .endObject().endObject().endObject()); - Version bwcVersion = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, - Version.V_6_5_0); + Version bwcVersion = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_4_0); for (Version version : new Version[] {Version.V_6_5_0, bwcVersion}) { DocumentMapper docMapper = createIndex("test-" + version, Settings.builder().put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(), version).build()) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index 4b2967553e5..28f1280382a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.document.Document; +import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.IntPoint; @@ -53,6 +54,7 @@ import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; public class NumberFieldTypeTests extends 
FieldTypeTestCase { @@ -530,4 +532,49 @@ public class NumberFieldTypeTests extends FieldTypeTestCase { assertEquals(Double.valueOf(1.2), new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE).valueForDisplay(1.2)); } + + public void testParsePoint() { + { + byte[] bytes = new byte[Integer.BYTES]; + byte value = randomByte(); + IntPoint.encodeDimension(value, bytes, 0); + assertThat(NumberType.BYTE.parsePoint(bytes), equalTo(value)); + } + { + byte[] bytes = new byte[Integer.BYTES]; + short value = randomShort(); + IntPoint.encodeDimension(value, bytes, 0); + assertThat(NumberType.SHORT.parsePoint(bytes), equalTo(value)); + } + { + byte[] bytes = new byte[Integer.BYTES]; + int value = randomInt(); + IntPoint.encodeDimension(value, bytes, 0); + assertThat(NumberType.INTEGER.parsePoint(bytes), equalTo(value)); + } + { + byte[] bytes = new byte[Long.BYTES]; + long value = randomLong(); + LongPoint.encodeDimension(value, bytes, 0); + assertThat(NumberType.LONG.parsePoint(bytes), equalTo(value)); + } + { + byte[] bytes = new byte[Float.BYTES]; + float value = randomFloat(); + FloatPoint.encodeDimension(value, bytes, 0); + assertThat(NumberType.FLOAT.parsePoint(bytes), equalTo(value)); + } + { + byte[] bytes = new byte[Double.BYTES]; + double value = randomDouble(); + DoublePoint.encodeDimension(value, bytes, 0); + assertThat(NumberType.DOUBLE.parsePoint(bytes), equalTo(value)); + } + { + byte[] bytes = new byte[Float.BYTES]; + float value = 3f; + HalfFloatPoint.encodeDimension(value, bytes, 0); + assertThat(NumberType.HALF_FLOAT.parsePoint(bytes), equalTo(value)); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index 56a14da845f..579764d671c 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterInfoService; @@ -69,6 +70,7 @@ import org.elasticsearch.indices.breaker.CircuitBreakerStats; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -579,7 +581,7 @@ public class IndexShardIT extends ESSingleNodeTestCase { // Generate a couple of segments client().prepareIndex("test", "_doc", "1").setSource("{\"foo\":\"" + randomAlphaOfLength(100) + "\"}", XContentType.JSON) .setRefreshPolicy(IMMEDIATE).get(); - // Use routing so 2 documents are guarenteed to be on the same shard + // Use routing so 2 documents are guaranteed to be on the same shard String routing = randomAlphaOfLength(5); client().prepareIndex("test", "_doc", "2").setSource("{\"foo\":\"" + randomAlphaOfLength(100) + "\"}", XContentType.JSON) .setRefreshPolicy(IMMEDIATE).setRouting(routing).get(); @@ -809,4 +811,37 @@ public 
class IndexShardIT extends ESSingleNodeTestCase {
         assertTrue(notified.get());
     }
 
+    public void testInvalidateIndicesRequestCacheWhenRollbackEngine() throws Exception {
+        createIndex("test", Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
+            .put("index.refresh_interval", -1).build());
+        ensureGreen();
+        final IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+        final IndexShard shard = indicesService.getShardOrNull(new ShardId(resolveIndex("test"), 0));
+        final SearchRequest countRequest = new SearchRequest("test").source(new SearchSourceBuilder().size(0));
+        final long numDocs = between(10, 20);
+        for (int i = 0; i < numDocs; i++) {
+            client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get();
+            if (randomBoolean()) {
+                shard.refresh("test");
+            }
+        }
+        shard.refresh("test");
+        assertThat(client().search(countRequest).actionGet().getHits().totalHits, equalTo(numDocs));
+        assertThat(shard.getLocalCheckpoint(), equalTo(shard.seqNoStats().getMaxSeqNo()));
+        shard.resetEngineToGlobalCheckpoint();
+        final long moreDocs = between(10, 20);
+        for (int i = 0; i < moreDocs; i++) {
+            client().prepareIndex("test", "_doc", Long.toString(i + numDocs)).setSource("{}", XContentType.JSON).get();
+            if (randomBoolean()) {
+                shard.refresh("test");
+            }
+        }
+        shard.refresh("test");
+        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
+            assertThat("numDocs=" + numDocs + " moreDocs=" + moreDocs, (long) searcher.reader().numDocs(), equalTo(numDocs + moreDocs));
+        }
+        assertThat("numDocs=" + numDocs + " moreDocs=" + moreDocs,
+            client().search(countRequest).actionGet().getHits().totalHits, equalTo(numDocs + moreDocs));
+    }
+
 }
diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
index 9d18845a05e..19c25fed072 100644
--- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
+++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
@@ -2323,7 +2323,7 @@ public class TranslogTests extends ESTestCase {
             @Override
             void deleteReaderFiles(TranslogReader reader) {
                 if (fail.fail()) {
-                    // simulate going OOM and dieing just at the wrong moment.
+                    // simulate going OOM and dying just at the wrong moment.
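The MultiValueModeTests changes further below pin down the `max_children` selection semantics: a sort mode folds in values from at most `maxChildren` child documents per root, counting only children that actually have a value, as the added `++count > maxChildren` breaks in the expected-value loops show. A simplified standalone sketch of that folding for MIN; the array-based representation of child values is an assumption for illustration:

```java
class MaxChildrenFoldSketch {
    // Simplified model of MultiValueMode.MIN under a maxChildren cap: only the
    // first maxChildren child docs that actually have values contribute.
    static long minWithMaxChildren(long[][] childValues, int maxChildren, long missingValue) {
        long min = Long.MAX_VALUE;
        int childrenSeen = 0;
        boolean hasValue = false;
        for (long[] values : childValues) { // one slot per child doc, in doc-id order
            if (values.length == 0) {
                continue; // children without a value do not count against the cap
            }
            if (++childrenSeen > maxChildren) {
                break; // cap reached; remaining children are ignored
            }
            for (long v : values) {
                min = Math.min(min, v);
                hasValue = true;
            }
        }
        return hasValue ? min : missingValue;
    }
}
```

The tests apply the same cap to MAX, SUM, and AVG before accumulating, so all modes see an identical subset of children.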
throw new RuntimeException("simulated"); } else { super.deleteReaderFiles(reader); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index 08bf43b91bb..4411d3f3e93 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -23,7 +23,9 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; @@ -119,7 +121,11 @@ public class IndicesRequestCacheTests extends ESTestCase { DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false); - + if (randomBoolean()) { + writer.flush(); + IOUtils.close(writer); + writer = new IndexWriter(dir, newIndexWriterConfig()); + } writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); @@ -424,14 +430,23 @@ public class IndicesRequestCacheTests extends ESTestCase { assertEquals(0, cache.numRegisteredCloseListeners()); } - public void testEqualsKey() { + public void testEqualsKey() throws IOException { AtomicBoolean trueBoolean = new AtomicBoolean(true); AtomicBoolean falseBoolean = new AtomicBoolean(false); - IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), 1L, new TestBytesReference(1)); - IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), 1L, new TestBytesReference(1)); - IndicesRequestCache.Key key3 = new IndicesRequestCache.Key(new TestEntity(null, falseBoolean), 1L, new TestBytesReference(1)); - IndicesRequestCache.Key key4 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), 2L, new TestBytesReference(1)); - IndicesRequestCache.Key key5 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), 1L, new TestBytesReference(2)); + Directory dir = newDirectory(); + IndexWriterConfig config = newIndexWriterConfig(); + IndexWriter writer = new IndexWriter(dir, config); + IndexReader reader1 = DirectoryReader.open(writer); + IndexReader.CacheKey rKey1 = reader1.getReaderCacheHelper().getKey(); + writer.addDocument(new Document()); + IndexReader reader2 = DirectoryReader.open(writer); + IndexReader.CacheKey rKey2 = reader2.getReaderCacheHelper().getKey(); + IOUtils.close(reader1, reader2, writer, dir); + IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1)); + IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1)); + IndicesRequestCache.Key key3 = new IndicesRequestCache.Key(new TestEntity(null, falseBoolean), rKey1, new TestBytesReference(1)); + IndicesRequestCache.Key key4 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey2, new 
TestBytesReference(1)); + IndicesRequestCache.Key key5 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(2)); String s = "Some other random object"; assertEquals(key1, key1); assertEquals(key1, key2); diff --git a/server/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java b/server/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java index 4a44c518051..aceee1474c6 100644 --- a/server/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java @@ -69,7 +69,7 @@ public class ProcessProbeTests extends ESTestCase { ProcessStats.Mem mem = stats.getMem(); assertNotNull(mem); - // Commited total virtual memory can return -1 if not supported, let's see which platforms fail + // Committed total virtual memory can return -1 if not supported, let's see which platforms fail assertThat(mem.getTotalVirtual().getBytes(), greaterThan(0L)); } } diff --git a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index b64f6ee0ee3..5cfee0a0213 100644 --- a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -109,7 +109,8 @@ public class MultiValueModeTests extends ESTestCase { verifySortedNumeric(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } public void testMultiValuedLongs() throws Exception { @@ -147,7 +148,8 @@ public class MultiValueModeTests extends ESTestCase { verifySortedNumeric(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } private void verifySortedNumeric(Supplier supplier, int maxDoc) throws IOException { @@ -210,11 +212,11 @@ public class MultiValueModeTests extends ESTestCase { } } - private void verifySortedNumeric(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedNumeric(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs, int maxChildren) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { SortedNumericDocValues values = supplier.get(); - final NumericDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); + final NumericDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc, maxChildren); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? 
rootDocs.nextSetBit(root + 1) : -1) { assertTrue(selected.advanceExact(root)); @@ -228,8 +230,12 @@ public class MultiValueModeTests extends ESTestCase { expected = Long.MAX_VALUE; } int numValues = 0; + int count = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { + if (++count > maxChildren) { + break; + } for (int j = 0; j < values.docValueCount(); ++j) { if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { expected += values.nextValue(); @@ -285,7 +291,8 @@ public class MultiValueModeTests extends ESTestCase { verifySortedNumericDouble(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } public void testMultiValuedDoubles() throws Exception { @@ -323,7 +330,8 @@ public class MultiValueModeTests extends ESTestCase { verifySortedNumericDouble(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } private void verifySortedNumericDouble(Supplier supplier, int maxDoc) throws IOException { @@ -385,11 +393,11 @@ public class MultiValueModeTests extends ESTestCase { } } - private void verifySortedNumericDouble(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedNumericDouble(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs, int maxChildren) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { SortedNumericDoubleValues values = supplier.get(); - final NumericDoubleValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); + final NumericDoubleValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc, maxChildren); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? 
rootDocs.nextSetBit(root + 1) : -1) { assertTrue(selected.advanceExact(root)); @@ -403,8 +411,12 @@ public class MultiValueModeTests extends ESTestCase { expected = Long.MAX_VALUE; } int numValues = 0; + int count = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { + if (++count > maxChildren) { + break; + } for (int j = 0; j < values.docValueCount(); ++j) { if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { expected += values.nextValue(); @@ -463,7 +475,8 @@ public class MultiValueModeTests extends ESTestCase { verifySortedBinary(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs); + verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } public void testMultiValuedStrings() throws Exception { @@ -501,7 +514,8 @@ public class MultiValueModeTests extends ESTestCase { verifySortedBinary(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs); + verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } private void verifySortedBinary(Supplier supplier, int maxDoc) throws IOException { @@ -548,11 +562,11 @@ public class MultiValueModeTests extends ESTestCase { } } - private void verifySortedBinary(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedBinary(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs, int maxChildren) throws IOException { for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(randomAlphaOfLengthBetween(8, 8)) }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { SortedBinaryDocValues values = supplier.get(); - final BinaryDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); + final BinaryDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc, maxChildren); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? 
rootDocs.nextSetBit(root + 1) : -1) { assertTrue(selected.advanceExact(root)); @@ -560,8 +574,12 @@ public class MultiValueModeTests extends ESTestCase { verifyBinaryValueCanCalledMoreThanOnce(selected, actual); BytesRef expected = null; + int count = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { + if (++count > maxChildren) { + break; + } for (int j = 0; j < values.docValueCount(); ++j) { if (expected == null) { expected = BytesRef.deepCopyOf(values.nextValue()); @@ -630,7 +648,8 @@ public class MultiValueModeTests extends ESTestCase { verifySortedSet(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedSet(multiValues, numDocs, rootDocs, innerDocs); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } public void testMultiValuedOrds() throws Exception { @@ -676,7 +695,8 @@ public class MultiValueModeTests extends ESTestCase { verifySortedSet(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedSet(multiValues, numDocs, rootDocs, innerDocs); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } private void verifySortedSet(Supplier supplier, int maxDoc) throws IOException { @@ -715,10 +735,10 @@ public class MultiValueModeTests extends ESTestCase { } } - private void verifySortedSet(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedSet(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs, int maxChildren) throws IOException { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { SortedSetDocValues values = supplier.get(); - final SortedDocValues selected = mode.select(values, rootDocs, new BitSetIterator(innerDocs, 0L)); + final SortedDocValues selected = mode.select(values, rootDocs, new BitSetIterator(innerDocs, 0L), maxChildren); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? 
rootDocs.nextSetBit(root + 1) : -1) { int actual = -1; @@ -727,8 +747,12 @@ public class MultiValueModeTests extends ESTestCase { verifyOrdValueCanCalledMoreThanOnce(selected, actual); } int expected = -1; + int count = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { + if (++count > maxChildren) { + break; + } for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { if (expected == -1) { expected = (int) ord; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java index d1918c170ed..7d10f49ea86 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java @@ -113,7 +113,7 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase ((List) script.get("states")).size()), diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 2cee3548aba..c5c99a5b136 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -170,7 +170,7 @@ public class InternalTopHitsTests extends InternalAggregationTestCase { iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7))); @@ -96,8 +117,9 @@ public class MaxAggregatorTests extends AggregatorTestCase { }); } - private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) - throws IOException { + private void testCase(Query query, + CheckedConsumer buildIndex, + Consumer verify) throws IOException { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); buildIndex.accept(indexWriter); @@ -107,10 +129,10 @@ public class MaxAggregatorTests extends AggregatorTestCase { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER); fieldType.setName("number"); - MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + MaxAggregator aggregator = createAggregator(query, aggregationBuilder, indexSearcher, createIndexSettings(), fieldType); aggregator.preCollection(); indexSearcher.search(query, aggregator); aggregator.postCollection(); @@ -119,4 +141,110 @@ public class MaxAggregatorTests extends AggregatorTestCase { indexReader.close(); directory.close(); } + + public void testMaxShortcutRandom() throws Exception { + testMaxShortcutCase( + () -> randomLongBetween(Integer.MIN_VALUE, Integer.MAX_VALUE), + (n) -> new LongPoint("number", n.longValue()), + (v) -> LongPoint.decodeDimension(v, 0)); + + testMaxShortcutCase( + () -> randomInt(), + (n) -> new IntPoint("number", n.intValue()), + (v) -> IntPoint.decodeDimension(v, 0)); + + 
testMaxShortcutCase( + () -> randomFloat(), + (n) -> new FloatPoint("number", n.floatValue()), + (v) -> FloatPoint.decodeDimension(v, 0)); + + testMaxShortcutCase( + () -> randomDouble(), + (n) -> new DoublePoint("number", n.doubleValue()), + (v) -> DoublePoint.decodeDimension(v, 0)); + } + + private void testMaxShortcutCase(Supplier randomNumber, + Function pointFieldFunc, + Function pointConvertFunc) throws IOException { + Directory directory = newDirectory(); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE); + IndexWriter indexWriter = new IndexWriter(directory, config); + List documents = new ArrayList<>(); + List> values = new ArrayList<>(); + int numValues = atLeast(50); + int docID = 0; + for (int i = 0; i < numValues; i++) { + int numDup = randomIntBetween(1, 3); + for (int j = 0; j < numDup; j++) { + Document document = new Document(); + Number nextValue = randomNumber.get(); + values.add(new Tuple<>(docID, nextValue)); + document.add(new StringField("id", Integer.toString(docID), Field.Store.NO)); + document.add(pointFieldFunc.apply(nextValue)); + documents.add(document); + docID ++; + } + } + // insert some documents without a value for the metric field. + for (int i = 0; i < 3; i++) { + Document document = new Document(); + documents.add(document); + } + indexWriter.addDocuments(documents); + Collections.sort(values, Comparator.comparingDouble(t -> t.v2().doubleValue())); + try (IndexReader reader = DirectoryReader.open(indexWriter)) { + LeafReaderContext ctx = reader.leaves().get(0); + Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number" , pointConvertFunc); + assertThat(res, equalTo(values.get(values.size()-1).v2())); + } + for (int i = values.size()-1; i > 0; i--) { + indexWriter.deleteDocuments(new Term("id", values.get(i).v1().toString())); + try (IndexReader reader = DirectoryReader.open(indexWriter)) { + LeafReaderContext ctx = reader.leaves().get(0); + Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number" , pointConvertFunc); + if (res != null) { + assertThat(res, equalTo(values.get(i - 1).v2())); + } else { + assertAllDeleted(ctx.reader().getLiveDocs(), ctx.reader().getPointValues("number")); + } + } + } + indexWriter.deleteDocuments(new Term("id", values.get(0).v1().toString())); + try (IndexReader reader = DirectoryReader.open(indexWriter)) { + LeafReaderContext ctx = reader.leaves().get(0); + Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number" , pointConvertFunc); + assertThat(res, equalTo(null)); + } + indexWriter.close(); + directory.close(); + } + + // checks that documents inside the max leaves are all deleted + private void assertAllDeleted(Bits liveDocs, PointValues values) throws IOException { + final byte[] maxValue = values.getMaxPackedValue(); + int numBytes = values.getBytesPerDimension(); + final boolean[] seen = new boolean[1]; + values.intersect(new PointValues.IntersectVisitor() { + @Override + public void visit(int docID) { + throw new AssertionError(); + } + + @Override + public void visit(int docID, byte[] packedValue) { + assertFalse(liveDocs.get(docID)); + seen[0] = true; + } + + @Override + public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + if (FutureArrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) { + return PointValues.Relation.CELL_CROSSES_QUERY; + } + return PointValues.Relation.CELL_OUTSIDE_QUERY; + } + }); + assertTrue(seen[0]); + } } diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java index 5447406f2f2..9c46c1db7ea 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java @@ -40,6 +40,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.search.aggregations.AggregationBuilders.count; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; @@ -392,4 +393,22 @@ public class MaxIT extends AbstractNumericTestCase { assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), equalTo(1L)); } + + public void testEarlyTermination() throws Exception { + SearchResponse searchResponse = client().prepareSearch("idx") + .setTrackTotalHits(false) + .setQuery(matchAllQuery()) + .addAggregation(max("max").field("values")) + .addAggregation(count("count").field("values")) + .execute().actionGet(); + + Max max = searchResponse.getAggregations().get("max"); + assertThat(max, notNullValue()); + assertThat(max.getName(), equalTo("max")); + assertThat(max.getValue(), equalTo(12.0)); + + ValueCount count = searchResponse.getAggregations().get("count"); + assertThat(count.getName(), equalTo("count")); + assertThat(count.getValue(), equalTo(20L)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java index 5b279f1ea49..ad897a2ef32 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java @@ -16,20 +16,59 @@ * specific language governing permissions and limitations * under the License. 
*/ + package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.document.Document; +import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.FieldContext; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.function.DoubleConsumer; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class MinAggregatorTests extends AggregatorTestCase { @@ -38,21 +77,27 @@ public class MinAggregatorTests extends AggregatorTestCase { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); Document document = new Document(); document.add(new NumericDocValuesField("number", 9)); + document.add(new LongPoint("number", 9)); indexWriter.addDocument(document); document = new Document(); document.add(new NumericDocValuesField("number", 7)); + document.add(new LongPoint("number", 7)); indexWriter.addDocument(document); document = new Document(); document.add(new NumericDocValuesField("number", 5)); + document.add(new LongPoint("number", 5)); indexWriter.addDocument(document); document = new Document(); document.add(new NumericDocValuesField("number", 3)); + document.add(new LongPoint("number", 3)); indexWriter.addDocument(document); document = new Document(); document.add(new NumericDocValuesField("number", 1)); + document.add(new LongPoint("number", 1)); indexWriter.addDocument(document); document = new Document(); document.add(new NumericDocValuesField("number", -1)); + document.add(new LongPoint("number", -1)); indexWriter.addDocument(document); indexWriter.close(); @@ -63,6 +108,8 @@ 
public class MinAggregatorTests extends AggregatorTestCase { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); fieldType.setName("number"); + testMinCase(indexSearcher, aggregationBuilder, fieldType, min -> assertEquals(-1.0d, min, 0)); + MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -80,14 +127,20 @@ public class MinAggregatorTests extends AggregatorTestCase { Document document = new Document(); document.add(new SortedNumericDocValuesField("number", 9)); document.add(new SortedNumericDocValuesField("number", 7)); + document.add(new LongPoint("number", 9)); + document.add(new LongPoint("number", 7)); indexWriter.addDocument(document); document = new Document(); document.add(new SortedNumericDocValuesField("number", 5)); document.add(new SortedNumericDocValuesField("number", 3)); + document.add(new LongPoint("number", 5)); + document.add(new LongPoint("number", 3)); indexWriter.addDocument(document); document = new Document(); document.add(new SortedNumericDocValuesField("number", 1)); document.add(new SortedNumericDocValuesField("number", -1)); + document.add(new LongPoint("number", 1)); + document.add(new LongPoint("number", -1)); indexWriter.addDocument(document); indexWriter.close(); @@ -164,4 +217,207 @@ public class MinAggregatorTests extends AggregatorTestCase { directory.close(); } + public void testShortcutIsApplicable() { + for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) { + assertNotNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(new MatchAllDocsQuery()), + null, + mockNumericValuesSourceConfig("number", type, true) + ) + ); + assertNotNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + null, + mockNumericValuesSourceConfig("number", type, true) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + mockAggregator(), + mockNumericValuesSourceConfig("number", type, true) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(new TermQuery(new Term("foo", "bar"))), + null, + mockNumericValuesSourceConfig("number", type, true) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + mockAggregator(), + mockNumericValuesSourceConfig("number", type, true) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + null, + mockNumericValuesSourceConfig("number", type, false) + ) + ); + } + assertNotNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(new MatchAllDocsQuery()), + null, + mockDateValuesSourceConfig("number", true) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(new MatchAllDocsQuery()), + mockAggregator(), + mockDateValuesSourceConfig("number", true) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(new TermQuery(new Term("foo", "bar"))), + null, + mockDateValuesSourceConfig("number", true) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + mockAggregator(), + mockDateValuesSourceConfig("number", true) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + null, + mockDateValuesSourceConfig("number", false) + ) + ); + } + + public void testMinShortcutRandom() throws Exception { + testMinShortcutCase( + () -> randomLongBetween(Integer.MIN_VALUE, 
Integer.MAX_VALUE), + (n) -> new LongPoint("number", n.longValue()), + (v) -> LongPoint.decodeDimension(v, 0)); + + testMinShortcutCase( + () -> randomInt(), + (n) -> new IntPoint("number", n.intValue()), + (v) -> IntPoint.decodeDimension(v, 0)); + + testMinShortcutCase( + () -> randomFloat(), + (n) -> new FloatPoint("number", n.floatValue()), + (v) -> FloatPoint.decodeDimension(v, 0)); + + testMinShortcutCase( + () -> randomDouble(), + (n) -> new DoublePoint("number", n.doubleValue()), + (v) -> DoublePoint.decodeDimension(v, 0)); + } + + private void testMinCase(IndexSearcher searcher, + AggregationBuilder aggregationBuilder, + MappedFieldType ft, + DoubleConsumer testResult) throws IOException { + Collection<Query> queries = Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(ft.name())); + for (Query query : queries) { + MinAggregator aggregator = createAggregator(query, aggregationBuilder, searcher, createIndexSettings(), ft); + aggregator.preCollection(); + searcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + InternalMin result = (InternalMin) aggregator.buildAggregation(0L); + testResult.accept(result.getValue()); + } + } + + private void testMinShortcutCase(Supplier<Number> randomNumber, + Function<Number, Field> pointFieldFunc, + Function<byte[], Number> pointConvertFunc) throws IOException { + Directory directory = newDirectory(); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE); + IndexWriter indexWriter = new IndexWriter(directory, config); + List<Document> documents = new ArrayList<>(); + List<Tuple<Integer, Number>> values = new ArrayList<>(); + int numValues = atLeast(50); + int docID = 0; + for (int i = 0; i < numValues; i++) { + int numDup = randomIntBetween(1, 3); + for (int j = 0; j < numDup; j++) { + Document document = new Document(); + Number nextValue = randomNumber.get(); + values.add(new Tuple<>(docID, nextValue)); + document.add(new StringField("id", Integer.toString(docID), Field.Store.NO)); + document.add(pointFieldFunc.apply(nextValue)); + documents.add(document); + docID++; + } + } + // insert some documents without a value for the metric field.
+ for (int i = 0; i < 3; i++) { + Document document = new Document(); + documents.add(document); + } + indexWriter.addDocuments(documents); + Collections.sort(values, Comparator.comparingDouble(t -> t.v2().doubleValue())); + try (IndexReader reader = DirectoryReader.open(indexWriter)) { + LeafReaderContext ctx = reader.leaves().get(0); + Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc); + assertThat(res, equalTo(values.get(0).v2())); + } + for (int i = 1; i < values.size(); i++) { + indexWriter.deleteDocuments(new Term("id", values.get(i - 1).v1().toString())); + try (IndexReader reader = DirectoryReader.open(indexWriter)) { + LeafReaderContext ctx = reader.leaves().get(0); + Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc); + assertThat(res, equalTo(values.get(i).v2())); + } + } + indexWriter.deleteDocuments(new Term("id", values.get(values.size()-1).v1().toString())); + try (IndexReader reader = DirectoryReader.open(indexWriter)) { + LeafReaderContext ctx = reader.leaves().get(0); + Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc); + assertThat(res, equalTo(null)); + } + indexWriter.close(); + directory.close(); + }
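
`MinAggregator.getPointReaderOrNull`, whose applicability rules testShortcutIsApplicable pins down above, is also not shown in this diff. A sketch consistent with those expectations follows; the order of the checks and the `parsePoint`/`decodeDimension` converter choices are assumptions inferred from the tests, and the mock helpers after the sketch supply exactly the inputs it consumes:

static Function<byte[], Number> getPointReaderOrNull(SearchContext context, Aggregator parent,
                                                     ValuesSourceConfig<ValuesSource.Numeric> config) {
    if (parent != null) {
        return null; // a sub-aggregator only sees the documents of its parent's buckets
    }
    if (context.query() != null && context.query().getClass() != MatchAllDocsQuery.class) {
        return null; // an arbitrary query filters documents, so the segment-wide min may not match
    }
    if (config.fieldContext() == null) {
        return null;
    }
    MappedFieldType fieldType = config.fieldContext().fieldType();
    if (fieldType == null || fieldType.indexOptions() == IndexOptions.NONE) {
        return null; // the field is not indexed, so there are no points to read
    }
    if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
        return ((NumberFieldMapper.NumberFieldType) fieldType)::parsePoint; // assumed per-type decoder
    } else if (fieldType instanceof DateFieldMapper.DateFieldType) {
        return v -> LongPoint.decodeDimension(v, 0); // dates are indexed as long points
    }
    return null;
}

+ + private SearchContext mockSearchContext(Query query) { + SearchContext searchContext = mock(SearchContext.class); + when(searchContext.query()).thenReturn(query); + return searchContext; + } + + private Aggregator mockAggregator() { + return mock(Aggregator.class); + } + + private ValuesSourceConfig<ValuesSource.Numeric> mockNumericValuesSourceConfig(String fieldName, + NumberFieldMapper.NumberType numType, + boolean indexed) { + ValuesSourceConfig<ValuesSource.Numeric> config = mock(ValuesSourceConfig.class); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(numType); + ft.setName(fieldName); + ft.setIndexOptions(indexed ? IndexOptions.DOCS : IndexOptions.NONE); + ft.freeze(); + when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft)); + return config; + } + + private ValuesSourceConfig<ValuesSource.Numeric> mockDateValuesSourceConfig(String fieldName, boolean indexed) { + ValuesSourceConfig<ValuesSource.Numeric> config = mock(ValuesSourceConfig.class); + MappedFieldType ft = new DateFieldMapper.Builder(fieldName).fieldType(); + ft.setName(fieldName); + ft.setIndexOptions(indexed ?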
IndexOptions.DOCS : IndexOptions.NONE); + ft.freeze(); + when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft)); + return config; + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java index d92d212f4d2..7bb0d23c4c2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java @@ -40,6 +40,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.search.aggregations.AggregationBuilders.count; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; @@ -404,4 +405,22 @@ public class MinIT extends AbstractNumericTestCase { assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), equalTo(1L)); } + + public void testEarlyTermination() throws Exception { + SearchResponse searchResponse = client().prepareSearch("idx") + .setTrackTotalHits(false) + .setQuery(matchAllQuery()) + .addAggregation(min("min").field("values")) + .addAggregation(count("count").field("values")) + .execute().actionGet(); + + Min min = searchResponse.getAggregations().get("min"); + assertThat(min, notNullValue()); + assertThat(min.getName(), equalTo("min")); + assertThat(min.getValue(), equalTo(2.0)); + + ValueCount count = searchResponse.getAggregations().get("count"); + assertThat(count.getName(), equalTo("count")); + assertThat(count.getValue(), equalTo(20L)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 268f4aeb26d..fc32abe18c7 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -159,7 +159,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase { @@ -275,17 +275,19 @@ public class ScheduleWithFixedDelayTests extends ESTestCase { doneLatch.countDown(); }; - final Cancellable cancellable = threadPool.scheduleWithFixedDelay(countingRunnable, TimeValue.timeValueMillis(10L), Names.GENERIC); + final TimeValue interval = TimeValue.timeValueMillis(50L); + final Cancellable cancellable = threadPool.scheduleWithFixedDelay(countingRunnable, interval, Names.GENERIC); doneLatch.await(); cancellable.cancel(); + final int counterValue = counter.get(); - assertThat(counterValue, isOneOf(iterations, iterations + 1)); + assertThat(counterValue, equalTo(iterations)); if (rarely()) { awaitBusy(() -> { final int value = counter.get(); - return value == iterations || value == iterations + 1; - }, 50L, TimeUnit.MILLISECONDS); + return value == iterations; + }, 5 * interval.millis(), TimeUnit.MILLISECONDS); } } diff --git a/server/src/test/resources/org/elasticsearch/action/bulk/simple-bulk7.json b/server/src/test/resources/org/elasticsearch/action/bulk/simple-bulk7.json index a642d9ce4fe..669bfd10798 100644 --- 
a/server/src/test/resources/org/elasticsearch/action/bulk/simple-bulk7.json +++ b/server/src/test/resources/org/elasticsearch/action/bulk/simple-bulk7.json @@ -2,5 +2,5 @@ {"field1": "value0"} {"index": {"_index": "test", "_type": "doc", "_id": 1}} {"field1": "value1"} -{"index": {"_index": "test", "_type": "doc", "_id": 2, "_unkown": ["foo", "bar"]}} +{"index": {"_index": "test", "_type": "doc", "_id": 2, "_unknown": ["foo", "bar"]}} {"field1": "value2"} diff --git a/server/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json b/server/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json index dd544527f52..4b91bcfb36b 100644 --- a/server/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json +++ b/server/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json @@ -2,7 +2,7 @@ "person":{ "dynamic_templates":[ { - "tempalte_1":{ + "template_1":{ "match":"multi*", "mapping":{ "type":"{dynamic_type}", diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index c50e7cf066b..1f934aa00b3 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -22,13 +22,13 @@ package org.elasticsearch.bootstrap; import com.carrotsearch.randomizedtesting.RandomizedRunner; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.network.IfConfig; import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.secure_sm.SecureSM; @@ -90,7 +90,7 @@ public class BootstrapForTesting { // check for jar hell try { - final Logger logger = ESLoggerFactory.getLogger(JarHell.class); + final Logger logger = LogManager.getLogger(JarHell.class); JarHell.checkJarHell(logger::debug); } catch (Exception e) { throw new RuntimeException("found jar hell in test classpath", e); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 12f0d645d8a..bb1efd69973 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -791,15 +791,14 @@ public abstract class EngineTestCase extends ESTestCase { Bits liveDocs = reader.getLiveDocs(); for (int i = 0; i < reader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { + if (primaryTermDocValues.advanceExact(i) == false) { + // We have to skip non-root docs because their _id field is not stored (indexed only).
+ continue; + } + final long primaryTerm = primaryTermDocValues.longValue(); Document uuid = reader.document(i, Collections.singleton(IdFieldMapper.NAME)); BytesRef binaryID = uuid.getBinaryValue(IdFieldMapper.NAME); String id = Uid.decodeId(Arrays.copyOfRange(binaryID.bytes, binaryID.offset, binaryID.offset + binaryID.length)); - final long primaryTerm; - if (primaryTermDocValues.advanceExact(i)) { - primaryTerm = primaryTermDocValues.longValue(); - } else { - primaryTerm = 0; // non-root documents of a nested document. - } if (seqNoDocValues.advanceExact(i) == false) { throw new AssertionError("seqNoDocValues not found for doc[" + i + "] id[" + id + "]"); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 9021fd1efbb..58fea953850 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -63,6 +63,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.engine.DocIdSeqNoAndTerm; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; @@ -442,13 +443,14 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase public synchronized void close() throws Exception { if (closed == false) { closed = true; - for (IndexShard replica : replicas) { - try { + try { + final List<DocIdSeqNoAndTerm> docsOnPrimary = getDocIdAndSeqNos(primary); + for (IndexShard replica : replicas) { assertThat(replica.getMaxSeenAutoIdTimestamp(), equalTo(primary.getMaxSeenAutoIdTimestamp())); assertThat(replica.getMaxSeqNoOfUpdatesOrDeletes(), greaterThanOrEqualTo(primary.getMaxSeqNoOfUpdatesOrDeletes())); - } catch (AlreadyClosedException ignored) { + assertThat(getDocIdAndSeqNos(replica), equalTo(docsOnPrimary)); } - } + } catch (AlreadyClosedException ignored) { } closeShards(this); } else { throw new AlreadyClosedException("too bad"); diff --git a/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java b/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java index 8a98b867a46..3ada3cc93d9 100644 --- a/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java @@ -43,7 +43,7 @@ public class TestThreadInfoPatternConverterTests extends ESTestCase { // Test threads get the test name assertEquals(getTestName(), threadInfo(Thread.currentThread().getName())); - // Suite initalization gets "suite" + // Suite initialization gets "suite" assertEquals("suite", suiteInfo); // And stuff that doesn't match anything gets wrapped in [] so we can see it diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java index 4c714c7e9aa..a55470d67a6 100644 ---
a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java @@ -482,7 +482,7 @@ public class DoSectionTests extends AbstractClientYamlTestFragmentParserTestCase " type: test_type\n" + "warnings:\n" + " - some test warning they are typically pretty long\n" + - " - some other test warning somtimes they have [in] them" + " - some other test warning sometimes they have [in] them" ); DoSection doSection = DoSection.parse(parser); @@ -496,7 +496,7 @@ public class DoSectionTests extends AbstractClientYamlTestFragmentParserTestCase assertThat(doSection.getApiCallSection().getBodies().size(), equalTo(0)); assertThat(doSection.getExpectedWarningHeaders(), equalTo(Arrays.asList( "some test warning they are typically pretty long", - "some other test warning somtimes they have [in] them"))); + "some other test warning sometimes they have [in] them"))); parser = createParser(YamlXContent.yamlXContent, "indices.get_field_mapping:\n" + diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 23f44c560ba..00ab93b8160 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -99,7 +99,7 @@ public class InternalTestClusterTests extends ESTestCase { /** * a set of settings that are expected to have different values between clusters, even if they have been initialized with the same - * base settins. + * base settings. */ static final Set<String> clusterUniqueSettings = new HashSet<>(); diff --git a/x-pack/docs/en/rest-api/security/ssl.asciidoc b/x-pack/docs/en/rest-api/security/ssl.asciidoc index 6462699570f..3593dbfbbc2 100644 --- a/x-pack/docs/en/rest-api/security/ssl.asciidoc +++ b/x-pack/docs/en/rest-api/security/ssl.asciidoc @@ -34,6 +34,10 @@ The list does not include certificates that are sourced from the default SSL context of the Java Runtime Environment (JRE), even if those certificates are in use within {xpack}. +NOTE: When a PKCS#11 token is configured as the truststore of the JRE, the API +will return all the certificates that are included in the PKCS#11 token, +irrespective of whether these are used in the {es} TLS configuration. + If {xpack} is configured to use a keystore or truststore, the API output includes all certificates in that store, even though some of the certificates might not be in active use within the cluster. diff --git a/x-pack/docs/en/watcher/actions/email.asciidoc b/x-pack/docs/en/watcher/actions/email.asciidoc index 8c56f2e5da3..acaa85c3c03 100644 --- a/x-pack/docs/en/watcher/actions/email.asciidoc +++ b/x-pack/docs/en/watcher/actions/email.asciidoc @@ -61,7 +61,7 @@ configuring the `http` attachment type, you must specify the request URL. The `reporting` attachment type is a special type to include PDF rendered dashboards from kibana. This type repeatedly polls the kibana app to check whether the dashboard rendering is done, preventing long-running HTTP connections that are potentially -killed by firewalls or load balancers inbetween. +killed by firewalls or load balancers in-between.
[source,js] -------------------------------------------------- diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/follower-roles.yml b/x-pack/plugin/ccr/qa/multi-cluster-with-security/follower-roles.yml index 8320143a9fb..be3e6cf5e17 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/follower-roles.yml +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/follower-roles.yml @@ -7,4 +7,4 @@ ccruser: - monitor - read - write - - create_follow_index + - manage_follow_index diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java index 699837fa643..761260f8ac6 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java @@ -104,16 +104,20 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertThat(countCcrNodeTasks(), equalTo(0)); }); - // User does not have create_follow_index index privilege for 'unallowedIndex': - Exception e = expectThrows(ResponseException.class, - () -> follow("leader_cluster:" + unallowedIndex, unallowedIndex)); + assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_close"))); + assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_ccr/unfollow"))); + Exception e = expectThrows(ResponseException.class, () -> resumeFollow("leader_cluster:" + allowedIndex, allowedIndex)); + assertThat(e.getMessage(), containsString("follow index [" + allowedIndex + "] does not have ccr metadata")); + + // User does not have manage_follow_index index privilege for 'unallowedIndex': + e = expectThrows(ResponseException.class, () -> follow("leader_cluster:" + unallowedIndex, unallowedIndex)); assertThat(e.getMessage(), containsString("action [indices:admin/xpack/ccr/put_follow] is unauthorized for user [test_ccr]")); // Verify that the follow index has not been created and no node tasks are running assertThat(indexExists(adminClient(), unallowedIndex), is(false)); assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); - // User does have create_follow_index index privilege on 'allowed' index, + // User does have manage_follow_index index privilege on 'allowed' index, // but not read / monitor roles on 'disallowed' index: e = expectThrows(ResponseException.class, () -> follow("leader_cluster:" + unallowedIndex, allowedIndex)); @@ -131,6 +135,10 @@ public class FollowIndexSecurityIT extends ESRestTestCase { "privilege for action [indices:data/read/xpack/ccr/shard_changes] is missing")); assertThat(indexExists(adminClient(), unallowedIndex), is(false)); assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); + + e = expectThrows(ResponseException.class, + () -> client().performRequest(new Request("POST", "/" + unallowedIndex + "/_ccr/unfollow"))); + assertThat(e.getMessage(), containsString("action [indices:admin/xpack/ccr/unfollow] is unauthorized for user [test_ccr]")); } } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index bfb6408c160..515534e214b 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java 
+++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -80,6 +80,12 @@ public class FollowIndexIT extends ESRestTestCase { } assertBusy(() -> verifyDocuments(followIndexName, numDocs + 3)); assertBusy(() -> verifyCcrMonitoring(leaderIndexName, followIndexName)); + + pauseFollow(followIndexName); + assertOK(client().performRequest(new Request("POST", "/" + followIndexName + "/_close"))); + assertOK(client().performRequest(new Request("POST", "/" + followIndexName + "/_ccr/unfollow"))); + Exception e = expectThrows(ResponseException.class, () -> resumeFollow("leader_cluster:" + leaderIndexName, followIndexName)); + assertThat(e.getMessage(), containsString("follow index [" + followIndexName + "] does not have ccr metadata")); } } diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml new file mode 100644 index 00000000000..3e5fff7b02d --- /dev/null +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml @@ -0,0 +1,59 @@ +--- +"Test stats": + - do: + indices.create: + index: foo + body: + settings: + index: + soft_deletes: + enabled: true + mappings: + doc: + properties: + field: + type: keyword + + - do: + ccr.follow: + index: bar + body: + leader_index: foo + - is_true: follow_index_created + - is_true: follow_index_shards_acked + - is_true: index_following_started + + # we can not reliably wait for replication to occur so we test the endpoint without indexing any documents + - do: + ccr.stats: + index: bar + - match: { indices.0.index: "bar" } + - match: { indices.0.shards.0.leader_index: "foo" } + - match: { indices.0.shards.0.follower_index: "bar" } + - match: { indices.0.shards.0.shard_id: 0 } + - gte: { indices.0.shards.0.leader_global_checkpoint: -1 } + - gte: { indices.0.shards.0.leader_max_seq_no: -1 } + - gte: { indices.0.shards.0.follower_global_checkpoint: -1 } + - gte: { indices.0.shards.0.follower_max_seq_no: -1 } + - gte: { indices.0.shards.0.last_requested_seq_no: -1 } + - gte: { indices.0.shards.0.number_of_concurrent_reads: 0 } + - match: { indices.0.shards.0.number_of_concurrent_writes: 0 } + - match: { indices.0.shards.0.number_of_queued_writes: 0 } + - gte: { indices.0.shards.0.mapping_version: 0 } + - gte: { indices.0.shards.0.total_fetch_time_millis: 0 } + - gte: { indices.0.shards.0.number_of_successful_fetches: 0 } + - gte: { indices.0.shards.0.number_of_failed_fetches: 0 } + - match: { indices.0.shards.0.operations_received: 0 } + - match: { indices.0.shards.0.total_transferred_bytes: 0 } + - match: { indices.0.shards.0.total_index_time_millis: 0 } + - match: { indices.0.shards.0.number_of_successful_bulk_operations: 0 } + - match: { indices.0.shards.0.number_of_failed_bulk_operations: 0 } + - match: { indices.0.shards.0.number_of_operations_indexed: 0 } + - length: { indices.0.shards.0.fetch_exceptions: 0 } + - gte: { indices.0.shards.0.time_since_last_fetch_millis: -1 } + + - do: + ccr.pause_follow: + index: bar + - is_true: acknowledged + diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/stats.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/stats.yml deleted file mode 100644 index 1c1170acf23..00000000000 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/stats.yml +++ /dev/null @@ -1,57 +0,0 @@ ---- -"Test stats": - - do: - indices.create: - index: foo - body: - settings: 
- index: - soft_deletes: - enabled: true - mappings: - doc: - properties: - field: - type: keyword - - - do: - ccr.follow: - index: bar - body: - leader_index: foo - - is_true: follow_index_created - - is_true: follow_index_shards_acked - - is_true: index_following_started - - # we can not reliably wait for replication to occur so we test the endpoint without indexing any documents - - do: - ccr.stats: - index: bar - - match: { bar.0.leader_index: "foo" } - - match: { bar.0.shard_id: 0 } - - gte: { bar.0.leader_global_checkpoint: -1 } - - gte: { bar.0.leader_max_seq_no: -1 } - - gte: { bar.0.follower_global_checkpoint: -1 } - - gte: { bar.0.follower_max_seq_no: -1 } - - gte: { bar.0.last_requested_seq_no: -1 } - - gte: { bar.0.number_of_concurrent_reads: 0 } - - match: { bar.0.number_of_concurrent_writes: 0 } - - match: { bar.0.number_of_queued_writes: 0 } - - gte: { bar.0.mapping_version: 0 } - - gte: { bar.0.total_fetch_time_millis: 0 } - - gte: { bar.0.number_of_successful_fetches: 0 } - - gte: { bar.0.number_of_failed_fetches: 0 } - - match: { bar.0.operations_received: 0 } - - match: { bar.0.total_transferred_bytes: 0 } - - match: { bar.0.total_index_time_millis: 0 } - - match: { bar.0.number_of_successful_bulk_operations: 0 } - - match: { bar.0.number_of_failed_bulk_operations: 0 } - - match: { bar.0.number_of_operations_indexed: 0 } - - length: { bar.0.fetch_exceptions: 0 } - - gte: { bar.0.time_since_last_fetch_millis: -1 } - - - do: - ccr.pause_follow: - index: bar - - is_true: acknowledged - diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 7caf144d533..1c1cade2484 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -53,7 +53,7 @@ import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.ShardChangesAction; import org.elasticsearch.xpack.ccr.action.ShardFollowTask; import org.elasticsearch.xpack.ccr.action.ShardFollowTasksExecutor; -import org.elasticsearch.xpack.ccr.action.TransportCcrStatsAction; +import org.elasticsearch.xpack.ccr.action.TransportFollowStatsAction; import org.elasticsearch.xpack.ccr.action.TransportPutFollowAction; import org.elasticsearch.xpack.ccr.action.TransportDeleteAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.TransportResumeFollowAction; @@ -62,7 +62,7 @@ import org.elasticsearch.xpack.ccr.action.TransportPauseFollowAction; import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsAction; import org.elasticsearch.xpack.ccr.action.bulk.TransportBulkShardOperationsAction; import org.elasticsearch.xpack.ccr.index.engine.FollowingEngineFactory; -import org.elasticsearch.xpack.ccr.rest.RestCcrStatsAction; +import org.elasticsearch.xpack.ccr.rest.RestFollowStatsAction; import org.elasticsearch.xpack.ccr.rest.RestPutFollowAction; import org.elasticsearch.xpack.ccr.rest.RestDeleteAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.rest.RestResumeFollowAction; @@ -70,7 +70,7 @@ import org.elasticsearch.xpack.ccr.rest.RestPutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.rest.RestPauseFollowAction; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; 
import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction; @@ -161,7 +161,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E new ActionHandler<>(BulkShardOperationsAction.INSTANCE, TransportBulkShardOperationsAction.class), new ActionHandler<>(ShardChangesAction.INSTANCE, ShardChangesAction.TransportAction.class), // stats action - new ActionHandler<>(CcrStatsAction.INSTANCE, TransportCcrStatsAction.class), + new ActionHandler<>(FollowStatsAction.INSTANCE, TransportFollowStatsAction.class), new ActionHandler<>(AutoFollowStatsAction.INSTANCE, TransportAutoFollowStatsAction.class), // follow actions new ActionHandler<>(PutFollowAction.INSTANCE, TransportPutFollowAction.class), @@ -184,7 +184,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E return Arrays.asList( // stats API - new RestCcrStatsAction(settings, restController), + new RestFollowStatsAction(settings, restController), new RestAutoFollowStatsAction(settings, restController), // follow APIs new RestPutFollowAction(settings, restController), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java index eb31ce25746..4c82c90b2d5 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java @@ -288,7 +288,7 @@ public class ShardChangesAction extends Action { IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService) { super(settings, NAME, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, Request::new, ThreadPool.Names.GET); + indexNameExpressionResolver, Request::new, ThreadPool.Names.SEARCH); this.indicesService = indicesService; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 781fb359a45..a10ee10f22a 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -238,15 +238,16 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { } innerSendShardChangesRequest(from, maxOperationCount, response -> { - if (response.getOperations().length > 0) { - // do not count polls against fetch stats - synchronized (ShardFollowNodeTask.this) { + synchronized (ShardFollowNodeTask.this) { + // Always clear fetch exceptions: + fetchExceptions.remove(from); + if (response.getOperations().length > 0) { + // do not count polls against fetch stats totalFetchTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); numberOfSuccessfulFetches++; - fetchExceptions.remove(from); operationsReceived += response.getOperations().length; totalTransferredBytes += - Arrays.stream(response.getOperations()).mapToLong(Translog.Operation::estimateSize).sum(); + Arrays.stream(response.getOperations()).mapToLong(Translog.Operation::estimateSize).sum(); } } handleReadResponse(from, maxRequiredSeqNo, response); diff --git 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowStatsAction.java similarity index 70% rename from x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java rename to x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowStatsAction.java index 394b42789d1..bce471d05e8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowStatsAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import java.io.IOException; import java.util.Arrays; @@ -32,16 +32,16 @@ import java.util.Objects; import java.util.Set; import java.util.function.Consumer; -public class TransportCcrStatsAction extends TransportTasksAction< +public class TransportFollowStatsAction extends TransportTasksAction< ShardFollowNodeTask, - CcrStatsAction.StatsRequest, - CcrStatsAction.StatsResponses, CcrStatsAction.StatsResponse> { + FollowStatsAction.StatsRequest, + FollowStatsAction.StatsResponses, FollowStatsAction.StatsResponse> { private final IndexNameExpressionResolver resolver; private final CcrLicenseChecker ccrLicenseChecker; @Inject - public TransportCcrStatsAction( + public TransportFollowStatsAction( final Settings settings, final ClusterService clusterService, final TransportService transportService, @@ -50,12 +50,12 @@ public class TransportCcrStatsAction extends TransportTasksAction< final CcrLicenseChecker ccrLicenseChecker) { super( settings, - CcrStatsAction.NAME, + FollowStatsAction.NAME, clusterService, transportService, actionFilters, - CcrStatsAction.StatsRequest::new, - CcrStatsAction.StatsResponses::new, + FollowStatsAction.StatsRequest::new, + FollowStatsAction.StatsResponses::new, Ccr.CCR_THREAD_POOL_NAME); this.resolver = Objects.requireNonNull(resolver); this.ccrLicenseChecker = Objects.requireNonNull(ccrLicenseChecker); @@ -64,8 +64,8 @@ public class TransportCcrStatsAction extends TransportTasksAction< @Override protected void doExecute( final Task task, - final CcrStatsAction.StatsRequest request, - final ActionListener<CcrStatsAction.StatsResponses> listener) { + final FollowStatsAction.StatsRequest request, + final ActionListener<FollowStatsAction.StatsResponses> listener) { if (ccrLicenseChecker.isCcrAllowed() == false) { listener.onFailure(LicenseUtils.newComplianceException("ccr")); return; @@ -74,21 +74,21 @@ public class TransportCcrStatsAction extends TransportTasksAction< } @Override - protected CcrStatsAction.StatsResponses newResponse( - final CcrStatsAction.StatsRequest request, - final List<CcrStatsAction.StatsResponse> statsRespons, + protected FollowStatsAction.StatsResponses newResponse( + final FollowStatsAction.StatsRequest request, + final List<FollowStatsAction.StatsResponse> statsRespons, final List<TaskOperationFailure> taskOperationFailures, final List<FailedNodeException> failedNodeExceptions) { - return new CcrStatsAction.StatsResponses(taskOperationFailures, failedNodeExceptions, statsRespons); + return new FollowStatsAction.StatsResponses(taskOperationFailures, failedNodeExceptions, statsRespons); } @Override - protected CcrStatsAction.StatsResponse readTaskResponse(final StreamInput in) throws IOException { - return new
CcrStatsAction.StatsResponse(in); + protected FollowStatsAction.StatsResponse readTaskResponse(final StreamInput in) throws IOException { + return new FollowStatsAction.StatsResponse(in); } @Override - protected void processTasks(final CcrStatsAction.StatsRequest request, final Consumer<ShardFollowNodeTask> operation) { + protected void processTasks(final FollowStatsAction.StatsRequest request, final Consumer<ShardFollowNodeTask> operation) { final ClusterState state = clusterService.state(); final Set<String> concreteIndices = new HashSet<>(Arrays.asList(resolver.concreteIndexNames(state, request))); for (final Task task : taskManager.getTasks().values()) { @@ -103,10 +103,10 @@ public class TransportCcrStatsAction extends TransportTasksAction< @Override protected void taskOperation( - final CcrStatsAction.StatsRequest request, + final FollowStatsAction.StatsRequest request, final ShardFollowNodeTask task, - final ActionListener<CcrStatsAction.StatsResponse> listener) { - listener.onResponse(new CcrStatsAction.StatsResponse(task.getStatus())); + final ActionListener<FollowStatsAction.StatsResponse> listener) { + listener.onResponse(new FollowStatsAction.StatsResponse(task.getStatus())); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java index db71e5b5af8..6d5df143eea 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ccr.action.bulk; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.TransportWriteAction; @@ -25,10 +26,12 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ccr.index.engine.AlreadyProcessedFollowingEngineException; import java.io.IOException; +import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; +import java.util.function.Function; public class TransportBulkShardOperationsAction extends TransportWriteAction<BulkShardOperationsRequest, BulkShardOperationsRequest, BulkShardOperationsResponse> { @@ -66,7 +69,7 @@ public class TransportBulkShardOperationsAction } // public for testing purposes only - public static WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> shardOperationOnPrimary( + public static CcrWritePrimaryResult shardOperationOnPrimary( final ShardId shardId, final String historyUUID, final List<Translog.Operation> sourceOperations, @@ -78,7 +81,7 @@ public class TransportBulkShardOperationsAction "], actual [" + primary.getHistoryUUID() + "], shard is likely restored from snapshot or force allocated"); } - final List<Translog.Operation> targetOperations = sourceOperations.stream().map(operation -> { + final Function<Translog.Operation, Translog.Operation> rewriteWithTerm = operation -> { final Translog.Operation operationWithPrimaryTerm; switch (operation.opType()) { case INDEX: @@ -111,36 +114,65 @@ public class TransportBulkShardOperationsAction throw new IllegalStateException("unexpected operation type [" + operation.opType() + "]"); } return operationWithPrimaryTerm; - }).collect(Collectors.toList()); + }; + assert maxSeqNoOfUpdatesOrDeletes >= SequenceNumbers.NO_OPS_PERFORMED : "invalid msu [" + maxSeqNoOfUpdatesOrDeletes + "]";
primary.advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNoOfUpdatesOrDeletes); - final Translog.Location location = applyTranslogOperations(targetOperations, primary, Engine.Operation.Origin.PRIMARY); + + final List<Translog.Operation> appliedOperations = new ArrayList<>(sourceOperations.size()); + Translog.Location location = null; + long waitingForGlobalCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; + for (Translog.Operation sourceOp : sourceOperations) { + final Translog.Operation targetOp = rewriteWithTerm.apply(sourceOp); + final Engine.Result result = primary.applyTranslogOperation(targetOp, Engine.Operation.Origin.PRIMARY); + if (result.getResultType() == Engine.Result.Type.SUCCESS) { + assert result.getSeqNo() == targetOp.seqNo(); + appliedOperations.add(targetOp); + location = locationToSync(location, result.getTranslogLocation()); + } else { + if (result.getFailure() instanceof AlreadyProcessedFollowingEngineException) { + // Skipped operations will be delivered to replicas via primary-replica resync or peer-recovery. + // The primary must not acknowledge this request until the global checkpoint is at least the highest + // seqno of all skipped operations (i.e., all skipped operations have been processed on every replica). + waitingForGlobalCheckpoint = SequenceNumbers.max(waitingForGlobalCheckpoint, targetOp.seqNo()); + } else { + assert false : "Only already-processed error should happen; op=[" + targetOp + "] error=[" + result.getFailure() + "]"; + throw ExceptionsHelper.convertToElastic(result.getFailure()); + } + } + } + assert appliedOperations.size() == sourceOperations.size() || waitingForGlobalCheckpoint != SequenceNumbers.UNASSIGNED_SEQ_NO : + "waiting global checkpoint is not assigned; waiting_gcp=" + waitingForGlobalCheckpoint + + " source_ops=" + sourceOperations.size() + " applied_ops=" + appliedOperations.size(); + assert appliedOperations.size() == 0 || location != null; final BulkShardOperationsRequest replicaRequest = new BulkShardOperationsRequest( - shardId, historyUUID, targetOperations, maxSeqNoOfUpdatesOrDeletes); - return new CcrWritePrimaryResult(replicaRequest, location, primary, logger); + shardId, historyUUID, appliedOperations, maxSeqNoOfUpdatesOrDeletes); + return new CcrWritePrimaryResult(replicaRequest, location, primary, waitingForGlobalCheckpoint, logger); } @Override protected WriteReplicaResult<BulkShardOperationsRequest> shardOperationOnReplica( final BulkShardOperationsRequest request, final IndexShard replica) throws Exception { - assert replica.getMaxSeqNoOfUpdatesOrDeletes() >= request.getMaxSeqNoOfUpdatesOrDeletes() : - "mus on replica [" + replica + "] < mus of request [" + request.getMaxSeqNoOfUpdatesOrDeletes() + "]"; - final Translog.Location location = applyTranslogOperations(request.getOperations(), replica, Engine.Operation.Origin.REPLICA); - return new WriteReplicaResult<>(request, location, null, replica, logger); + return shardOperationOnReplica(request, replica, logger); } // public for testing purposes only - public static Translog.Location applyTranslogOperations( - final List<Translog.Operation> operations, final IndexShard shard, final Engine.Operation.Origin origin) throws IOException { + public static WriteReplicaResult<BulkShardOperationsRequest> shardOperationOnReplica( + final BulkShardOperationsRequest request, final IndexShard replica, final Logger logger) throws IOException { + assert replica.getMaxSeqNoOfUpdatesOrDeletes() >= request.getMaxSeqNoOfUpdatesOrDeletes() : + "mus on replica [" + replica + "] < mus of request [" + request.getMaxSeqNoOfUpdatesOrDeletes() + "]"; Translog.Location location = null; - for (final
Translog.Operation operation : operations) { - final Engine.Result result = shard.applyTranslogOperation(operation, origin); + for (final Translog.Operation operation : request.getOperations()) { + final Engine.Result result = replica.applyTranslogOperation(operation, Engine.Operation.Origin.REPLICA); + if (result.getResultType() != Engine.Result.Type.SUCCESS) { + assert false : "doc-level failure must not happen on replicas; op[" + operation + "] error[" + result.getFailure() + "]"; + throw ExceptionsHelper.convertToElastic(result.getFailure()); + } assert result.getSeqNo() == operation.seqNo(); - assert result.getResultType() == Engine.Result.Type.SUCCESS; location = locationToSync(location, result.getTranslogLocation()); } - assert operations.size() == 0 || location != null; - return location; + assert request.getOperations().size() == 0 || location != null; + return new WriteReplicaResult<>(request, location, null, replica, logger); } @Override @@ -151,20 +183,37 @@ public class TransportBulkShardOperationsAction /** * Custom write result to include global checkpoint after ops have been replicated. */ - static class CcrWritePrimaryResult extends WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> { + static final class CcrWritePrimaryResult extends WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> { + final long waitingForGlobalCheckpoint; - CcrWritePrimaryResult(BulkShardOperationsRequest request, Translog.Location location, IndexShard primary, Logger logger) { + CcrWritePrimaryResult(BulkShardOperationsRequest request, Translog.Location location, IndexShard primary, + long waitingForGlobalCheckpoint, Logger logger) { super(request, new BulkShardOperationsResponse(), location, null, primary, logger); + this.waitingForGlobalCheckpoint = waitingForGlobalCheckpoint; } @Override public synchronized void respond(ActionListener<BulkShardOperationsResponse> listener) { - final BulkShardOperationsResponse response = finalResponseIfSuccessful; - final SeqNoStats seqNoStats = primary.seqNoStats(); - // return a fresh global checkpoint after the operations have been replicated for the shard follow task - response.setGlobalCheckpoint(seqNoStats.getGlobalCheckpoint()); - response.setMaxSeqNo(seqNoStats.getMaxSeqNo()); - listener.onResponse(response); + final ActionListener<BulkShardOperationsResponse> wrappedListener = ActionListener.wrap(response -> { + final SeqNoStats seqNoStats = primary.seqNoStats(); + // return a fresh global checkpoint after the operations have been replicated for the shard follow task + response.setGlobalCheckpoint(seqNoStats.getGlobalCheckpoint()); + response.setMaxSeqNo(seqNoStats.getMaxSeqNo()); + listener.onResponse(response); + }, listener::onFailure); + + if (waitingForGlobalCheckpoint != SequenceNumbers.UNASSIGNED_SEQ_NO) { + primary.addGlobalCheckpointListener(waitingForGlobalCheckpoint, (gcp, e) -> { + if (e != null) { + listener.onFailure(e); + } else { + assert waitingForGlobalCheckpoint <= gcp : waitingForGlobalCheckpoint + " > " + gcp; + super.respond(wrappedListener); + } + }, null); + } else { + super.respond(wrappedListener); + } } }
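
The `respond` override above interleaves two concerns: refreshing the returned global checkpoint and deferring the acknowledgement. Isolated, the deferral pattern looks like the following sketch (`ackWhenGloballyVisible` is a hypothetical helper name, not part of this change; the listener call shape is the `IndexShard#addGlobalCheckpointListener` usage shown above):

// Defer an acknowledgement until the global checkpoint covers waitingSeqNo, i.e. until
// every in-sync copy has processed the operations that the primary skipped.
static void ackWhenGloballyVisible(IndexShard shard, long waitingSeqNo, ActionListener<Void> listener) {
    if (waitingSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) {
        listener.onResponse(null); // nothing was skipped, acknowledge immediately
        return;
    }
    shard.addGlobalCheckpointListener(waitingSeqNo, (globalCheckpoint, e) -> {
        if (e != null) {
            listener.onFailure(e); // e.g. the shard was closed before the checkpoint advanced
        } else {
            assert waitingSeqNo <= globalCheckpoint : waitingSeqNo + " > " + globalCheckpoint;
            listener.onResponse(null);
        }
    }, null);
}

diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/AlreadyProcessedFollowingEngineException.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/AlreadyProcessedFollowingEngineException.java new file mode 100644 index 00000000000..9e19c93b286 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/AlreadyProcessedFollowingEngineException.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V.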
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ccr.index.engine; + +import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.index.shard.ShardId; + +public final class AlreadyProcessedFollowingEngineException extends VersionConflictEngineException { + AlreadyProcessedFollowingEngineException(ShardId shardId, long seqNo) { + super(shardId, "operation [{}] was processed before", null, seqNo); + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java index 458461f3c84..8a413ce4980 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java @@ -58,8 +58,21 @@ public final class FollowingEngine extends InternalEngine { final long maxSeqNoOfUpdatesOrDeletes = getMaxSeqNoOfUpdatesOrDeletes(); assert maxSeqNoOfUpdatesOrDeletes != SequenceNumbers.UNASSIGNED_SEQ_NO : "max_seq_no_of_updates is not initialized"; if (hasBeenProcessedBefore(index)) { - return IndexingStrategy.processButSkipLucene(false, index.seqNo(), index.version()); - + if (index.origin() == Operation.Origin.PRIMARY) { + /* + * The existing operation in this engine was probably assigned the term of the previous primary shard which is different + * from the term of the current operation. If the current operation arrives on replicas before the previous operation, + * then the Lucene content between the primary and replicas is not identical (primary terms are different). Since the + * existing operations are guaranteed to be replicated to replicas either via peer-recovery or primary-replica resync, + * we can safely skip this operation here and let the caller know the decision via AlreadyProcessedFollowingEngineException. + * The caller then waits for the global checkpoint to advance to at least the seq_no of this operation to make sure that + * the existing operation was replicated to all replicas (see TransportBulkShardOperationsAction#shardOperationOnPrimary). + */ + final AlreadyProcessedFollowingEngineException error = new AlreadyProcessedFollowingEngineException(shardId, index.seqNo()); + return IndexingStrategy.skipDueToVersionConflict(error, false, index.version(), index.primaryTerm()); + } else { + return IndexingStrategy.processButSkipLucene(false, index.seqNo(), index.version()); + } } else if (maxSeqNoOfUpdatesOrDeletes <= getLocalCheckpoint()) { assert maxSeqNoOfUpdatesOrDeletes < index.seqNo() : "seq_no[" + index.seqNo() + "] <= msu[" + maxSeqNoOfUpdatesOrDeletes + "]"; numOfOptimizedIndexing.inc(); @@ -73,7 +86,19 @@ public final class FollowingEngine extends InternalEngine { @Override protected InternalEngine.DeletionStrategy deletionStrategyForOperation(final Delete delete) throws IOException { preFlight(delete); - return planDeletionAsNonPrimary(delete); + if (delete.origin() == Operation.Origin.PRIMARY && hasBeenProcessedBefore(delete)) { + // See the comment in #indexingStrategyForOperation for the explanation why we can safely skip this operation.
+ final AlreadyProcessedFollowingEngineException error = new AlreadyProcessedFollowingEngineException(shardId, delete.seqNo()); + return DeletionStrategy.skipDueToVersionConflict(error, delete.version(), delete.primaryTerm(), false); + } else { + return planDeletionAsNonPrimary(delete); + } + } + + @Override + public NoOpResult noOp(NoOp noOp) { + // TODO: Make sure we process NoOp once. + return super.noOp(noOp); } @Override diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java similarity index 73% rename from x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java rename to x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java index 976d8ba2074..25572894af3 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java @@ -13,13 +13,13 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import java.io.IOException; -public class RestCcrStatsAction extends BaseRestHandler { +public class RestFollowStatsAction extends BaseRestHandler { - public RestCcrStatsAction(final Settings settings, final RestController controller) { + public RestFollowStatsAction(final Settings settings, final RestController controller) { super(settings); controller.registerHandler(RestRequest.Method.GET, "/_ccr/stats", this); controller.registerHandler(RestRequest.Method.GET, "/{index}/_ccr/stats", this); @@ -32,9 +32,9 @@ public class RestCcrStatsAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException { - final CcrStatsAction.StatsRequest request = new CcrStatsAction.StatsRequest(); + final FollowStatsAction.StatsRequest request = new FollowStatsAction.StatsRequest(); request.setIndices(Strings.splitStringByCommaToArray(restRequest.param("index"))); - return channel -> client.execute(CcrStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(FollowStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java index 7e5ae092408..eb0b29912b2 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java @@ -22,7 +22,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; @@ -88,21 +88,24 @@ 
public class CcrLicenseIT extends ESSingleNodeTestCase { latch.await(); } - public void testThatCcrStatsAreUnavailableWithNonCompliantLicense() throws InterruptedException { + public void testThatFollowStatsAreUnavailableWithNonCompliantLicense() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); - client().execute(CcrStatsAction.INSTANCE, new CcrStatsAction.StatsRequest(), new ActionListener() { - @Override - public void onResponse(final CcrStatsAction.StatsResponses statsResponses) { - latch.countDown(); - fail(); - } + client().execute( + FollowStatsAction.INSTANCE, + new FollowStatsAction.StatsRequest(), + new ActionListener() { + @Override + public void onResponse(final FollowStatsAction.StatsResponses statsResponses) { + latch.countDown(); + fail(); + } - @Override - public void onFailure(final Exception e) { - assertNonCompliantLicense(e); - latch.countDown(); - } - }); + @Override + public void onFailure(final Exception e) { + assertNonCompliantLicense(e); + latch.countDown(); + } + }); latch.await(); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java index f4d098f41ab..6a4b6191666 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java @@ -28,6 +28,8 @@ import org.elasticsearch.analysis.common.CommonAnalysisPlugin; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -47,11 +49,13 @@ import org.elasticsearch.index.shard.IndexShardTestCase; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.MockHttpTransport; import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.xpack.ccr.action.ShardChangesAction; @@ -59,9 +63,9 @@ import org.elasticsearch.xpack.ccr.action.ShardFollowTask; import org.elasticsearch.xpack.ccr.index.engine.FollowingEngine; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction.StatsRequest; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction.StatsResponses; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction.StatsRequest; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction.StatsResponses; import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import 
org.elasticsearch.xpack.core.ccr.action.PauseFollowAction; @@ -79,6 +83,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -117,6 +122,14 @@ public class ShardChangesIT extends ESIntegTestCase { return Arrays.asList(LocalStateCcr.class, CommonAnalysisPlugin.class); } + @Override + protected void beforeIndexDeletion() throws Exception { + super.beforeIndexDeletion(); + assertSeqNos(); + assertSameDocIdsOnShards(); + internalCluster().assertConsistentHistoryBetweenTranslogAndLuceneIndex(); + } + @Override protected boolean ignoreExternalCluster() { return true; @@ -360,7 +373,7 @@ public class ShardChangesIT extends ESIntegTestCase { assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), numberOfShards); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33337") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/34412") public void testFollowIndexAndCloseNode() throws Exception { internalCluster().ensureAtLeastNumDataNodes(3); String leaderIndexSettings = getIndexSettings(3, 1, singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); @@ -455,8 +468,10 @@ public class ShardChangesIT extends ESIntegTestCase { } public void testFollowNonExistentIndex() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test-leader").get()); - assertAcked(client().admin().indices().prepareCreate("test-follower").get()); + String indexSettings = getIndexSettings(1, 0, Collections.emptyMap()); + assertAcked(client().admin().indices().prepareCreate("test-leader").setSource(indexSettings, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("test-follower").setSource(indexSettings, XContentType.JSON).get()); + ensureGreen("test-leader", "test-follower"); // Leader index does not exist. 
ResumeFollowAction.Request followRequest1 = resumeFollow("non-existent-leader", "test-follower"); expectThrows(IndexNotFoundException.class, () -> client().execute(ResumeFollowAction.INSTANCE, followRequest1).actionGet()); @@ -570,7 +585,7 @@ public class ShardChangesIT extends ESIntegTestCase { client().admin().indices().close(new CloseIndexRequest("index1")).actionGet(); assertBusy(() -> { - StatsResponses response = client().execute(CcrStatsAction.INSTANCE, new StatsRequest()).actionGet(); + StatsResponses response = client().execute(FollowStatsAction.INSTANCE, new StatsRequest()).actionGet(); assertThat(response.getNodeFailures(), empty()); assertThat(response.getTaskFailures(), empty()); assertThat(response.getStatsResponses(), hasSize(1)); @@ -605,7 +620,7 @@ public class ShardChangesIT extends ESIntegTestCase { client().admin().indices().close(new CloseIndexRequest("index2")).actionGet(); client().prepareIndex("index1", "doc", "2").setSource("{}", XContentType.JSON).get(); assertBusy(() -> { - StatsResponses response = client().execute(CcrStatsAction.INSTANCE, new StatsRequest()).actionGet(); + StatsResponses response = client().execute(FollowStatsAction.INSTANCE, new StatsRequest()).actionGet(); assertThat(response.getNodeFailures(), empty()); assertThat(response.getTaskFailures(), empty()); assertThat(response.getStatsResponses(), hasSize(1)); @@ -681,6 +696,57 @@ public class ShardChangesIT extends ESIntegTestCase { assertThat(client().prepareSearch("index2").get().getHits().getTotalHits(), equalTo(2L)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/34412") + public void testFailOverOnFollower() throws Exception { + int numberOfReplicas = between(1, 2); + internalCluster().startMasterOnlyNode(); + internalCluster().startDataOnlyNodes(numberOfReplicas + between(1, 2)); + String leaderIndexSettings = getIndexSettings(1, numberOfReplicas, + singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); + assertAcked(client().admin().indices().prepareCreate("leader-index").setSource(leaderIndexSettings, XContentType.JSON)); + ensureGreen("leader-index"); + AtomicBoolean stopped = new AtomicBoolean(); + Thread[] threads = new Thread[between(1, 8)]; + AtomicInteger docID = new AtomicInteger(); + for (int i = 0; i < threads.length; i++) { + threads[i] = new Thread(() -> { + while (stopped.get() == false) { + try { + if (frequently()) { + String id = Integer.toString(frequently() ? 
docID.incrementAndGet() : between(0, 10)); // sometimes update + client().prepareIndex("leader-index", "doc", id).setSource("{\"f\":" + id + "}", XContentType.JSON).get(); + } else { + String id = Integer.toString(between(0, docID.get())); + client().prepareDelete("leader-index", "doc", id).get(); + } + } catch (NodeClosedException ignored) { + } + } + }); + threads[i].start(); + } + PutFollowAction.Request follow = follow("leader-index", "follower-index"); + client().execute(PutFollowAction.INSTANCE, follow).get(); + ensureGreen("follower-index"); + atLeastDocsIndexed("follower-index", between(20, 60)); + final ClusterState clusterState = clusterService().state(); + for (ShardRouting shardRouting : clusterState.routingTable().allShards("follower-index")) { + if (shardRouting.primary()) { + DiscoveryNode assignedNode = clusterState.nodes().get(shardRouting.currentNodeId()); + internalCluster().restartNode(assignedNode.getName(), new InternalTestCluster.RestartCallback()); + break; + } + } + ensureGreen("follower-index"); + atLeastDocsIndexed("follower-index", between(80, 150)); + stopped.set(true); + for (Thread thread : threads) { + thread.join(); + } + assertSameDocCount("leader-index", "follower-index"); + unfollowIndex("follower-index"); + } + private CheckedRunnable assertTask(final int numberOfPrimaryShards, final Map numDocsPerShard) { return () -> { final ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java index 3c21abcfbf5..f86594b3b69 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java @@ -17,6 +17,6 @@ public class PutFollowActionRequestTests extends AbstractStreamableTestCase { +public class ResumeFollowActionRequestTests extends AbstractStreamableXContentTestCase { @Override protected ResumeFollowAction.Request createBlankInstance() { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java index 8727f8b907b..e772516e331 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java @@ -190,6 +190,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { mappingVersions.add(1L); leaderGlobalCheckpoints.add(63L); maxSeqNos.add(63L); + responseSizes.add(64); simulateResponse.set(true); final AtomicLong retryCounter = new AtomicLong(); // before each retry, we assert the fetch failures; after the last retry, the fetch failure should clear @@ -228,6 +229,35 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); } + public void testEmptyShardChangesResponseShouldClearFetchException() { + ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + startTask(task, -1, -1); + + readFailures.add(new ShardNotFoundException(new ShardId("leader_index", "", 0))); + mappingVersions.add(1L); + leaderGlobalCheckpoints.add(-1L); + maxSeqNos.add(-1L); + simulateResponse.set(true); 
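The queues seeded here (readFailures, mappingVersions, leaderGlobalCheckpoints, maxSeqNos) feed a stubbed shard-changes reader, and the point of the new test is that a successful response, even one carrying zero operations, clears the previously recorded fetch failure. A minimal self-contained sketch of that stub pattern, with hypothetical names rather than the ES test harness:

```java
// Queue-driven stub: each seeded failure is replayed once; any later success,
// including an empty response, clears the recorded fetch failure.
import java.util.ArrayDeque;
import java.util.Deque;

final class StubReader {
    private final Deque<Exception> readFailures = new ArrayDeque<>();
    private Exception lastFetchFailure;

    void enqueueFailure(Exception e) {
        readFailures.add(e);
    }

    /** Simulates one read: replays a seeded failure, or succeeds with zero operations. */
    int read() {
        Exception failure = readFailures.poll();
        if (failure != null) {
            lastFetchFailure = failure; // remembered for status reporting; the caller retries
            return -1;
        }
        lastFetchFailure = null;        // success clears the failure, even with no ops
        return 0;
    }

    Exception lastFetchFailure() {
        return lastFetchFailure;
    }

    public static void main(String[] args) {
        StubReader reader = new StubReader();
        reader.enqueueFailure(new RuntimeException("shard not found"));
        reader.read();                                 // first attempt fails
        System.out.println(reader.lastFetchFailure()); // java.lang.RuntimeException: shard not found
        reader.read();                                 // retry returns an empty response
        System.out.println(reader.lastFetchFailure()); // null: the failure has been cleared
    }
}
```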
+ task.coordinateReads(); + + // number of requests is equal to initial request + retried attempts + assertThat(shardChangesRequests.size(), equalTo(2)); + for (long[] shardChangesRequest : shardChangesRequests) { + assertThat(shardChangesRequest[0], equalTo(0L)); + assertThat(shardChangesRequest[1], equalTo(64L)); + } + + assertFalse("task is not stopped", task.isStopped()); + ShardFollowNodeTaskStatus status = task.getStatus(); + assertThat(status.numberOfConcurrentReads(), equalTo(1)); + assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.numberOfFailedFetches(), equalTo(1L)); + // the fetch failure should have been cleared: + assertThat(status.fetchExceptions().entrySet(), hasSize(0)); + assertThat(status.lastRequestedSeqNo(), equalTo(-1L)); + assertThat(status.leaderGlobalCheckpoint(), equalTo(-1L)); + } + public void testReceiveTimeout() { final ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); startTask(task, 63, -1); @@ -262,6 +292,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { mappingVersions.add(1L); leaderGlobalCheckpoints.add(63L); maxSeqNos.add(63L); + responseSizes.add(64); simulateResponse.set(true); task.coordinateReads(); @@ -742,7 +773,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { if (readFailure != null) { errorHandler.accept(readFailure); } else if (simulateResponse.get()) { - final int responseSize = responseSizes.size() == 0 ? requestBatchSize : responseSizes.poll(); + final int responseSize = responseSizes.size() == 0 ? 0 : responseSizes.poll(); final Translog.Operation[] operations = new Translog.Operation[responseSize]; for (int i = 0; i < responseSize; i++) { operations[i] = new Translog.NoOp(from + i, 0, "test"); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java index 055005b9e7d..5539bc6ae47 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java @@ -12,19 +12,22 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.replication.TransportWriteAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.engine.Engine.Operation.Origin; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.replication.ESIndexLevelReplicationTestCase; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardTestCase; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import 
org.elasticsearch.threadpool.ThreadPool;
@@ -37,7 +40,9 @@ import org.elasticsearch.xpack.ccr.index.engine.FollowingEngineFactory;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -45,6 +50,7 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
 import java.util.function.LongConsumer;
+import java.util.stream.Collectors;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -221,6 +227,58 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
         }
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/34412")
+    public void testRetryBulkShardOperations() throws Exception {
+        try (ReplicationGroup leaderGroup = createGroup(between(0, 1));
+             ReplicationGroup followerGroup = createFollowGroup(between(1, 3))) {
+            leaderGroup.startAll();
+            followerGroup.startAll();
+            leaderGroup.appendDocs(between(10, 100));
+            leaderGroup.refresh("test");
+            for (String deleteId : randomSubsetOf(IndexShardTestCase.getShardDocUIDs(leaderGroup.getPrimary()))) {
+                BulkItemResponse resp = leaderGroup.delete(new DeleteRequest("test", "type", deleteId));
+                assertThat(resp.getFailure(), nullValue());
+            }
+            leaderGroup.syncGlobalCheckpoint();
+            IndexShard leadingPrimary = leaderGroup.getPrimary();
+            // Simulates some bulk requests are completed on the primary and replicated to some (but not all) replicas of the follower
+            // but the primary of the follower crashed before these requests completed.
+            for (int numBulks = between(1, 5), i = 0; i < numBulks; i++) {
+                long fromSeqNo = randomLongBetween(0, leadingPrimary.getGlobalCheckpoint());
+                long toSeqNo = randomLongBetween(fromSeqNo, leadingPrimary.getGlobalCheckpoint());
+                int numOps = Math.toIntExact(toSeqNo + 1 - fromSeqNo);
+                Translog.Operation[] ops = ShardChangesAction.getOperations(leadingPrimary, leadingPrimary.getGlobalCheckpoint(),
+                    fromSeqNo, numOps, leadingPrimary.getHistoryUUID(), new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES));
+
+                IndexShard followingPrimary = followerGroup.getPrimary();
+                TransportWriteAction.WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> primaryResult =
+                    TransportBulkShardOperationsAction.shardOperationOnPrimary(followingPrimary.shardId(),
+                        followingPrimary.getHistoryUUID(), Arrays.asList(ops), leadingPrimary.getMaxSeqNoOfUpdatesOrDeletes(),
+                        followingPrimary, logger);
+                for (IndexShard replica : randomSubsetOf(followerGroup.getReplicas())) {
+                    final PlainActionFuture<Releasable> permitFuture = new PlainActionFuture<>();
+                    replica.acquireReplicaOperationPermit(followingPrimary.getOperationPrimaryTerm(),
+                        followingPrimary.getGlobalCheckpoint(), followingPrimary.getMaxSeqNoOfUpdatesOrDeletes(),
+                        permitFuture, ThreadPool.Names.SAME, primaryResult);
+                    try (Releasable ignored = permitFuture.get()) {
+                        TransportBulkShardOperationsAction.shardOperationOnReplica(primaryResult.replicaRequest(), replica, logger);
+                    }
+                }
+            }
+            // A follow-task retries these requests while the primary-replica resync is happening on the follower.
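Retrying the same operations against every copy is safe here because each shard applies an operation, keyed by its sequence number, at most once; a batch that reached only some replicas before the promotion converges once the follow task re-sends it. A small model of that idempotence, with illustrative names rather than the ES replication classes:

```java
// Idempotent per-seq_no application: duplicate deliveries are no-ops, so a retried
// batch drives every copy to the same state.
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class ReplicaModel {
    private final Map<Long, String> docsBySeqNo = new HashMap<>();

    void apply(long seqNo, String doc) {
        docsBySeqNo.putIfAbsent(seqNo, doc); // a duplicate delivery changes nothing
    }

    public static void main(String[] args) {
        List<ReplicaModel> copies = Arrays.asList(new ReplicaModel(), new ReplicaModel());
        copies.get(0).apply(0L, "doc-0");    // first delivery reaches only one copy
        for (ReplicaModel copy : copies) {   // after the promotion, the batch is retried everywhere
            copy.apply(0L, "doc-0");
        }
        System.out.println(copies.get(0).docsBySeqNo.equals(copies.get(1).docsBySeqNo)); // true
    }
}
```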
+ followerGroup.promoteReplicaToPrimary(randomFrom(followerGroup.getReplicas())); + ShardFollowNodeTask shardFollowTask = createShardFollowTask(leaderGroup, followerGroup); + SeqNoStats followerSeqNoStats = followerGroup.getPrimary().seqNoStats(); + shardFollowTask.start(followerGroup.getPrimary().getHistoryUUID(), leadingPrimary.getGlobalCheckpoint(), + leadingPrimary.getMaxSeqNoOfUpdatesOrDeletes(), followerSeqNoStats.getGlobalCheckpoint(), followerSeqNoStats.getMaxSeqNo()); + assertBusy(() -> { + assertThat(followerGroup.getPrimary().getGlobalCheckpoint(), equalTo(leadingPrimary.getGlobalCheckpoint())); + assertConsistentHistoryBetweenLeaderAndFollower(leaderGroup, followerGroup); + }); + shardFollowTask.markAsCompleted(); + } + } + @Override protected ReplicationGroup createGroup(int replicas, Settings settings) throws IOException { Settings newSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) @@ -366,13 +424,29 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest }; } - private void assertConsistentHistoryBetweenLeaderAndFollower(ReplicationGroup leader, ReplicationGroup follower) throws IOException { - int totalOps = leader.getPrimary().estimateNumberOfHistoryOperations("test", 0); - for (IndexShard followingShard : follower) { - assertThat(followingShard.estimateNumberOfHistoryOperations("test", 0), equalTo(totalOps)); + private void assertConsistentHistoryBetweenLeaderAndFollower(ReplicationGroup leader, ReplicationGroup follower) throws Exception { + final List> docAndSeqNosOnLeader = getDocIdAndSeqNos(leader.getPrimary()).stream() + .map(d -> Tuple.tuple(d.getId(), d.getSeqNo())).collect(Collectors.toList()); + final Set> operationsOnLeader = new HashSet<>(); + try (Translog.Snapshot snapshot = leader.getPrimary().getHistoryOperations("test", 0)) { + Translog.Operation op; + while ((op = snapshot.next()) != null) { + operationsOnLeader.add(Tuple.tuple(op.seqNo(), op.opType())); + } } for (IndexShard followingShard : follower) { assertThat(followingShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(leader.getPrimary().getMaxSeqNoOfUpdatesOrDeletes())); + List> docAndSeqNosOnFollower = getDocIdAndSeqNos(followingShard).stream() + .map(d -> Tuple.tuple(d.getId(), d.getSeqNo())).collect(Collectors.toList()); + assertThat(docAndSeqNosOnFollower, equalTo(docAndSeqNosOnLeader)); + final Set> operationsOnFollower = new HashSet<>(); + try (Translog.Snapshot snapshot = followingShard.getHistoryOperations("test", 0)) { + Translog.Operation op; + while ((op = snapshot.next()) != null) { + operationsOnFollower.add(Tuple.tuple(op.seqNo(), op.opType())); + } + } + assertThat(operationsOnFollower, equalTo(operationsOnLeader)); } } @@ -384,15 +458,24 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest @Override protected PrimaryResult performOnPrimary(IndexShard primary, BulkShardOperationsRequest request) throws Exception { - TransportWriteAction.WritePrimaryResult result = - TransportBulkShardOperationsAction.shardOperationOnPrimary(primary.shardId(), request.getHistoryUUID(), + final PlainActionFuture permitFuture = new PlainActionFuture<>(); + primary.acquirePrimaryOperationPermit(permitFuture, ThreadPool.Names.SAME, request); + final TransportWriteAction.WritePrimaryResult ccrResult; + try (Releasable ignored = permitFuture.get()) { + ccrResult = TransportBulkShardOperationsAction.shardOperationOnPrimary(primary.shardId(), request.getHistoryUUID(), request.getOperations(), 
request.getMaxSeqNoOfUpdatesOrDeletes(), primary, logger); - return new PrimaryResult(result.replicaRequest(), result.finalResponseIfSuccessful); + } + return new PrimaryResult(ccrResult.replicaRequest(), ccrResult.finalResponseIfSuccessful) { + @Override + public void respond(ActionListener listener) { + ccrResult.respond(listener); + } + }; } @Override protected void performOnReplica(BulkShardOperationsRequest request, IndexShard replica) throws Exception { - TransportBulkShardOperationsAction.applyTranslogOperations(request.getOperations(), replica, Origin.REPLICA); + TransportBulkShardOperationsAction.shardOperationOnReplica(request, replica, logger); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsRequestTests.java index ea1e8874914..97c2b26a4a7 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsRequestTests.java @@ -6,18 +6,18 @@ package org.elasticsearch.xpack.ccr.action; import org.elasticsearch.test.AbstractStreamableTestCase; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; -public class StatsRequestTests extends AbstractStreamableTestCase { +public class StatsRequestTests extends AbstractStreamableTestCase { @Override - protected CcrStatsAction.StatsRequest createBlankInstance() { - return new CcrStatsAction.StatsRequest(); + protected FollowStatsAction.StatsRequest createBlankInstance() { + return new FollowStatsAction.StatsRequest(); } @Override - protected CcrStatsAction.StatsRequest createTestInstance() { - CcrStatsAction.StatsRequest statsRequest = new CcrStatsAction.StatsRequest(); + protected FollowStatsAction.StatsRequest createTestInstance() { + FollowStatsAction.StatsRequest statsRequest = new FollowStatsAction.StatsRequest(); if (randomBoolean()) { statsRequest.setIndices(generateRandomStringArray(8, 4, false)); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java index b79f8db1923..1de949b850b 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java @@ -7,23 +7,23 @@ package org.elasticsearch.xpack.ccr.action; import org.elasticsearch.test.AbstractStreamableTestCase; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import java.util.ArrayList; import java.util.Collections; import java.util.List; -public class StatsResponsesTests extends AbstractStreamableTestCase { +public class StatsResponsesTests extends AbstractStreamableTestCase { @Override - protected CcrStatsAction.StatsResponses createBlankInstance() { - return new CcrStatsAction.StatsResponses(); + protected FollowStatsAction.StatsResponses createBlankInstance() { + return new FollowStatsAction.StatsResponses(); } @Override - protected CcrStatsAction.StatsResponses createTestInstance() { + protected FollowStatsAction.StatsResponses createTestInstance() { int numResponses = randomIntBetween(0, 8); - List responses = new 
ArrayList<>(numResponses); + List responses = new ArrayList<>(numResponses); for (int i = 0; i < numResponses; i++) { ShardFollowNodeTaskStatus status = new ShardFollowNodeTaskStatus( randomAlphaOfLength(4), @@ -49,8 +49,8 @@ public class StatsResponsesTests extends AbstractStreamableTestCase listener = new PlainActionFuture<>(); + CcrWritePrimaryResult primaryResult = new CcrWritePrimaryResult(request, null, shard, -2, logger); + primaryResult.respond(listener); + assertThat("should return intermediately if waiting_global_checkpoint is not specified", listener.isDone(), equalTo(true)); + assertThat(listener.get().getMaxSeqNo(), equalTo(shard.seqNoStats().getMaxSeqNo())); + } + { + PlainActionFuture listener = new PlainActionFuture<>(); + long waitingForGlobalCheckpoint = randomLongBetween(shard.getGlobalCheckpoint() + 1, shard.getLocalCheckpoint()); + CcrWritePrimaryResult primaryResult = new CcrWritePrimaryResult(request, null, shard, waitingForGlobalCheckpoint, logger); + primaryResult.respond(listener); + assertThat(listener.isDone(), equalTo(false)); + expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TimeValue.timeValueMillis(1))); + + shard.updateGlobalCheckpointOnReplica(randomLongBetween(shard.getGlobalCheckpoint(), waitingForGlobalCheckpoint - 1), "test"); + expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TimeValue.timeValueMillis(1))); + + shard.updateGlobalCheckpointOnReplica(randomLongBetween(waitingForGlobalCheckpoint, shard.getLocalCheckpoint()), "test"); + assertThat(listener.get().getMaxSeqNo(), equalTo(shard.seqNoStats().getMaxSeqNo())); + assertThat(listener.get().getGlobalCheckpoint(), equalTo(shard.getGlobalCheckpoint())); + } + { + PlainActionFuture listener = new PlainActionFuture<>(); + long waitingForGlobalCheckpoint = randomLongBetween(-1, shard.getGlobalCheckpoint()); + CcrWritePrimaryResult primaryResult = new CcrWritePrimaryResult(request, null, shard, waitingForGlobalCheckpoint, logger); + primaryResult.respond(listener); + assertThat(listener.get().getMaxSeqNo(), equalTo(shard.seqNoStats().getMaxSeqNo())); + assertThat(listener.get().getGlobalCheckpoint(), equalTo(shard.getGlobalCheckpoint())); + } + closeShards(shard); + } + + public void testPrimaryResultIncludeOnlyAppliedOperations() throws Exception { + final Settings settings = Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true).build(); + final IndexShard primary = newStartedShard(true, settings, new FollowingEngineFactory()); + long seqno = 0; + List firstBulk = new ArrayList<>(); + List secondBulk = new ArrayList<>(); + for (int numOps = between(1, 100), i = 0; i < numOps; i++) { + final String id = Integer.toString(between(1, 100)); + final Translog.Operation op; + if (randomBoolean()) { + op = new Translog.Index("_doc", id, seqno++, primaryTerm, 0, SOURCE, null, -1); + } else { + op = new Translog.Delete("_doc", id, new Term("_id", Uid.encodeId(id)), seqno++, primaryTerm, 0); + } + if (randomBoolean()) { + firstBulk.add(op); + } else { + secondBulk.add(op); + } + } + Randomness.shuffle(firstBulk); + Randomness.shuffle(secondBulk); + primary.advanceMaxSeqNoOfUpdatesOrDeletes(seqno); + + final CcrWritePrimaryResult fullResult = TransportBulkShardOperationsAction.shardOperationOnPrimary(primary.shardId(), + primary.getHistoryUUID(), firstBulk, seqno, primary, logger); + assertThat(fullResult.replicaRequest().getOperations(), + equalTo(rewriteWithPrimaryTerm(firstBulk, primary.getOperationPrimaryTerm()))); + 
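The CcrWritePrimaryResult assertions above pin down two behaviors: a waiting-for-global-checkpoint value of -2 means the response is sent immediately, and any other target defers the response until the shard's global checkpoint has advanced at least that far. A self-contained sketch of that handshake, assuming the checkpoint only moves forward (hypothetical names, not the ES classes):

```java
// Respond immediately for target -2 or an already-reached checkpoint;
// otherwise park the reply until advanceTo(...) covers the target.
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CompletableFuture;

final class CheckpointWaiter {
    private static final class Waiter {
        final long target;
        final CompletableFuture<Long> response;
        Waiter(long target, CompletableFuture<Long> response) {
            this.target = target;
            this.response = response;
        }
    }

    private long globalCheckpoint = -1L;
    private final List<Waiter> waiters = new ArrayList<>();

    /** A target of -2 (or one already reached) answers immediately; otherwise the reply is deferred. */
    synchronized CompletableFuture<Long> respondWhenReached(long target) {
        CompletableFuture<Long> response = new CompletableFuture<>();
        if (target == -2L || target <= globalCheckpoint) {
            response.complete(globalCheckpoint);
        } else {
            waiters.add(new Waiter(target, response));
        }
        return response;
    }

    synchronized void advanceTo(long checkpoint) {
        globalCheckpoint = Math.max(globalCheckpoint, checkpoint);
        for (Iterator<Waiter> it = waiters.iterator(); it.hasNext(); ) {
            Waiter w = it.next();
            if (w.target <= globalCheckpoint) {
                w.response.complete(globalCheckpoint); // release the deferred reply
                it.remove();
            }
        }
    }

    public static void main(String[] args) {
        CheckpointWaiter shard = new CheckpointWaiter();
        System.out.println(shard.respondWhenReached(-2L).isDone()); // true: no waiting requested
        CompletableFuture<Long> deferred = shard.respondWhenReached(5L);
        System.out.println(deferred.isDone());                      // false: checkpoint still at -1
        shard.advanceTo(7L);
        System.out.println(deferred.isDone());                      // true: released by the advance
    }
}
```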
assertThat(fullResult.waitingForGlobalCheckpoint, equalTo(-2L)); + + // This bulk includes some operations from the first bulk. These operations should not be included in the result. + final List existingOps = randomSubsetOf(firstBulk); + final CcrWritePrimaryResult partialResult = TransportBulkShardOperationsAction.shardOperationOnPrimary(primary.shardId(), + primary.getHistoryUUID(), Stream.concat(existingOps.stream(), secondBulk.stream()).collect(Collectors.toList()), + seqno, primary, logger); + assertThat(partialResult.replicaRequest().getOperations(), + equalTo(rewriteWithPrimaryTerm(secondBulk, primary.getOperationPrimaryTerm()))); + assertThat(partialResult.waitingForGlobalCheckpoint, + equalTo(existingOps.stream().mapToLong(Translog.Operation::seqNo).max().orElse(-2L))); + + closeShards(primary); + } + + private List rewriteWithPrimaryTerm(List sourceOperations, long primaryTerm) { + return sourceOperations.stream().map(op -> { + switch (op.opType()) { + case INDEX: + final Translog.Index index = (Translog.Index) op; + return new Translog.Index(index.type(), index.id(), index.seqNo(), primaryTerm, + index.version(), BytesReference.toBytes(index.source()), index.routing(), index.getAutoGeneratedIdTimestamp()); + case DELETE: + final Translog.Delete delete = (Translog.Delete) op; + return new Translog.Delete(delete.type(), delete.id(), delete.uid(), delete.seqNo(), primaryTerm, delete.version()); + case NO_OP: + final Translog.NoOp noOp = (Translog.NoOp) op; + return new Translog.NoOp(noOp.seqNo(), primaryTerm, noOp.reason()); + default: + throw new IllegalStateException("unexpected operation type [" + op.opType() + "]"); + } + }).collect(Collectors.toList()); + } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java index ce67cfe2d44..ec59e4c5b1d 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.codec.CodecService; +import org.elasticsearch.index.engine.DocIdSeqNoAndTerm; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.EngineTestCase; @@ -58,6 +59,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.instanceOf; public class FollowingEngineTests extends ESTestCase { @@ -298,6 +300,25 @@ public class FollowingEngineTests extends ESTestCase { return new Engine.Delete(parsedDoc.type(), parsedDoc.id(), EngineTestCase.newUid(parsedDoc), primaryTerm.get()); } + private Engine.Result applyOperation(Engine engine, Engine.Operation op, + long primaryTerm, Engine.Operation.Origin origin) throws IOException { + final VersionType versionType = origin == Engine.Operation.Origin.PRIMARY ? 
op.versionType() : null; + final Engine.Result result; + if (op instanceof Engine.Index) { + Engine.Index index = (Engine.Index) op; + result = engine.index(new Engine.Index(index.uid(), index.parsedDoc(), index.seqNo(), primaryTerm, index.version(), + versionType, origin, index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry())); + } else if (op instanceof Engine.Delete) { + Engine.Delete delete = (Engine.Delete) op; + result = engine.delete(new Engine.Delete(delete.type(), delete.id(), delete.uid(), delete.seqNo(), primaryTerm, + delete.version(), versionType, origin, delete.startTime())); + } else { + Engine.NoOp noOp = (Engine.NoOp) op; + result = engine.noOp(new Engine.NoOp(noOp.seqNo(), primaryTerm, origin, noOp.startTime(), noOp.reason())); + } + return result; + } + public void testBasicOptimization() throws Exception { runFollowTest((leader, follower) -> { long numDocs = between(1, 100); @@ -531,4 +552,57 @@ public class FollowingEngineTests extends ESTestCase { } }; } + + public void testProcessOnceOnPrimary() throws Exception { + final Settings settings = Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0) + .put("index.version.created", Version.CURRENT).put("index.xpack.ccr.following_index", true).build(); + final IndexMetaData indexMetaData = IndexMetaData.builder(index.getName()).settings(settings).build(); + final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + int numOps = between(10, 100); + List operations = new ArrayList<>(numOps); + for (int i = 0; i < numOps; i++) { + ParsedDocument doc = EngineTestCase.createParsedDoc(Integer.toString(between(1, 100)), null); + if (randomBoolean()) { + operations.add(new Engine.Index(EngineTestCase.newUid(doc), doc, i, primaryTerm.get(), 1L, + VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, threadPool.relativeTimeInMillis(), -1, true)); + } else { + operations.add(new Engine.Delete(doc.type(), doc.id(), EngineTestCase.newUid(doc), i, primaryTerm.get(), 1L, + VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, threadPool.relativeTimeInMillis())); + } + } + Randomness.shuffle(operations); + try (Store store = createStore(shardId, indexSettings, newDirectory())) { + final EngineConfig engineConfig = engineConfig(shardId, indexSettings, threadPool, store, logger, xContentRegistry()); + try (FollowingEngine followingEngine = createEngine(store, engineConfig)) { + followingEngine.advanceMaxSeqNoOfUpdatesOrDeletes(operations.size() - 1L); + final long oldTerm = randomLongBetween(1, Integer.MAX_VALUE); + for (Engine.Operation op : operations) { + Engine.Result result = applyOperation(followingEngine, op, oldTerm, randomFrom(Engine.Operation.Origin.values())); + assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS)); + } + // Primary should reject duplicates + final long newTerm = randomLongBetween(oldTerm + 1, Long.MAX_VALUE); + for (Engine.Operation op : operations) { + Engine.Result result = applyOperation(followingEngine, op, newTerm, Engine.Operation.Origin.PRIMARY); + assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE)); + assertThat(result.getFailure(), instanceOf(AlreadyProcessedFollowingEngineException.class)); + } + for (DocIdSeqNoAndTerm docId : getDocIds(followingEngine, true)) { + assertThat(docId.getPrimaryTerm(), equalTo(oldTerm)); + } + // Replica should accept duplicates + primaryTerm.set(newTerm); + followingEngine.rollTranslogGeneration(); + for (Engine.Operation op : operations) { + 
Engine.Operation.Origin nonPrimary = randomValueOtherThan(Engine.Operation.Origin.PRIMARY, + () -> randomFrom(Engine.Operation.Origin.values())); + Engine.Result result = applyOperation(followingEngine, op, newTerm, nonPrimary); + assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS)); + } + for (DocIdSeqNoAndTerm docId : getDocIds(followingEngine, true)) { + assertThat(docId.getPrimaryTerm(), equalTo(oldTerm)); + } + } + } + } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsCollectorTests.java similarity index 67% rename from x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java rename to x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsCollectorTests.java index b0f2a00d2dc..904735ffedf 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsCollectorTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import org.elasticsearch.xpack.core.ccr.client.CcrClient; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; @@ -39,11 +39,11 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -public class CcrStatsCollectorTests extends AbstractCcrCollectorTestCase { +public class FollowStatsCollectorTests extends AbstractCcrCollectorTestCase { @Override AbstractCcrCollector createCollector(Settings settings, ClusterService clusterService, XPackLicenseState licenseState, Client client) { - return new CcrStatsCollector(settings, clusterService, licenseState, client); + return new FollowStatsCollector(settings, clusterService, licenseState, client); } public void testDoCollect() throws Exception { @@ -55,18 +55,20 @@ public class CcrStatsCollectorTests extends AbstractCcrCollectorTestCase { final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); - withCollectionTimeout(CcrStatsCollector.CCR_STATS_TIMEOUT, timeout); + withCollectionTimeout(FollowStatsCollector.CCR_STATS_TIMEOUT, timeout); - final CcrStatsCollector collector = new CcrStatsCollector(Settings.EMPTY, clusterService, licenseState, client, threadContext); + final FollowStatsCollector collector = + new FollowStatsCollector(Settings.EMPTY, clusterService, licenseState, client, threadContext); assertEquals(timeout, collector.getCollectionTimeout()); - final List statuses = mockStatuses(); + final List statuses = mockStatuses(); @SuppressWarnings("unchecked") - final ActionFuture future = (ActionFuture)mock(ActionFuture.class); - final CcrStatsAction.StatsResponses responses = new CcrStatsAction.StatsResponses(emptyList(), emptyList(), statuses); + final ActionFuture future = + (ActionFuture)mock(ActionFuture.class); + final 
FollowStatsAction.StatsResponses responses = new FollowStatsAction.StatsResponses(emptyList(), emptyList(), statuses); - final CcrStatsAction.StatsRequest request = new CcrStatsAction.StatsRequest(); + final FollowStatsAction.StatsRequest request = new FollowStatsAction.StatsRequest(); request.setIndices(Strings.EMPTY_ARRAY); when(client.stats(statsRequestEq(request))).thenReturn(future); when(future.actionGet(timeout)).thenReturn(responses); @@ -81,26 +83,26 @@ public class CcrStatsCollectorTests extends AbstractCcrCollectorTestCase { int index = 0; for (final Iterator it = documents.iterator(); it.hasNext(); index++) { - final CcrStatsMonitoringDoc document = (CcrStatsMonitoringDoc)it.next(); - final CcrStatsAction.StatsResponse status = statuses.get(index); + final FollowStatsMonitoringDoc document = (FollowStatsMonitoringDoc)it.next(); + final FollowStatsAction.StatsResponse status = statuses.get(index); assertThat(document.getCluster(), is(clusterUuid)); assertThat(document.getTimestamp(), greaterThan(0L)); assertThat(document.getIntervalMillis(), equalTo(interval)); assertThat(document.getNode(), equalTo(node)); assertThat(document.getSystem(), is(MonitoredSystem.ES)); - assertThat(document.getType(), is(CcrStatsMonitoringDoc.TYPE)); + assertThat(document.getType(), is(FollowStatsMonitoringDoc.TYPE)); assertThat(document.getId(), nullValue()); assertThat(document.status(), is(status.status())); } } - private List mockStatuses() { + private List mockStatuses() { final int count = randomIntBetween(1, 8); - final List statuses = new ArrayList<>(count); + final List statuses = new ArrayList<>(count); for (int i = 0; i < count; ++i) { - CcrStatsAction.StatsResponse statsResponse = mock(CcrStatsAction.StatsResponse.class); + FollowStatsAction.StatsResponse statsResponse = mock(FollowStatsAction.StatsResponse.class); ShardFollowNodeTaskStatus status = mock(ShardFollowNodeTaskStatus.class); when(statsResponse.status()).thenReturn(status); statuses.add(statsResponse); @@ -109,21 +111,21 @@ public class CcrStatsCollectorTests extends AbstractCcrCollectorTestCase { return statuses; } - private static CcrStatsAction.StatsRequest statsRequestEq(CcrStatsAction.StatsRequest expected) { - return argThat(new StatsRequestMatches(expected)); + private static FollowStatsAction.StatsRequest statsRequestEq(FollowStatsAction.StatsRequest expected) { + return argThat(new FollowStatsRequest(expected)); } - private static class StatsRequestMatches extends ArgumentMatcher { + private static class FollowStatsRequest extends ArgumentMatcher { - private final CcrStatsAction.StatsRequest expected; + private final FollowStatsAction.StatsRequest expected; - private StatsRequestMatches(CcrStatsAction.StatsRequest expected) { + private FollowStatsRequest(FollowStatsAction.StatsRequest expected) { this.expected = expected; } @Override public boolean matches(Object o) { - CcrStatsAction.StatsRequest actual = (CcrStatsAction.StatsRequest) o; + FollowStatsAction.StatsRequest actual = (FollowStatsAction.StatsRequest) o; return Arrays.equals(expected.indices(), actual.indices()); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java similarity index 93% rename from x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java rename to 
x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java index 808a1e20159..58af900a5ad 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java @@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; -public class CcrStatsMonitoringDocTests extends BaseMonitoringDocTestCase { +public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase { private ShardFollowNodeTaskStatus status; @@ -52,12 +52,12 @@ public class CcrStatsMonitoringDocTests extends BaseMonitoringDocTestCase new CcrStatsMonitoringDoc(cluster, timestamp, interval, node, null)); + expectThrows(NullPointerException.class, () -> new FollowStatsMonitoringDoc(cluster, timestamp, interval, node, null)); assertThat(e, hasToString(containsString("status"))); } @Override - protected CcrStatsMonitoringDoc createMonitoringDoc( + protected FollowStatsMonitoringDoc createMonitoringDoc( final String cluster, final long timestamp, final long interval, @@ -65,13 +65,13 @@ public class CcrStatsMonitoringDocTests extends BaseMonitoringDocTestCase template = XContentHelper.convertToMap(XContentType.JSON.xContent(), MonitoringTemplateUtils.loadTemplate("es"), false); - Map ccrStatsMapping = (Map) XContentMapValues.extractValue("mappings.doc.properties.ccr_stats.properties", template); + Map followStatsMapping = (Map) XContentMapValues.extractValue("mappings.doc.properties.ccr_stats.properties", template); - assertThat(serializedStatus.size(), equalTo(ccrStatsMapping.size())); + assertThat(serializedStatus.size(), equalTo(followStatsMapping.size())); for (Map.Entry entry : serializedStatus.entrySet()) { String fieldName = entry.getKey(); - Map fieldMapping = (Map) ccrStatsMapping.get(fieldName); + Map fieldMapping = (Map) followStatsMapping.get(fieldName); assertThat(fieldMapping, notNullValue()); Object fieldValue = entry.getValue(); diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index a58500b880f..01e8179fb62 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -35,7 +35,7 @@ dependencies { compile "commons-codec:commons-codec:${versions.commonscodec}" // security deps - compile 'com.unboundid:unboundid-ldapsdk:3.2.0' + compile 'com.unboundid:unboundid-ldapsdk:4.0.8' compile project(path: ':modules:transport-netty4', configuration: 'runtime') compile(project(path: ':plugins:transport-nio', configuration: 'runtime')) { // TODO: core exclusion should not be necessary, since it is a transitive dep of all plugins diff --git a/x-pack/plugin/core/licenses/unboundid-ldapsdk-3.2.0.jar.sha1 b/x-pack/plugin/core/licenses/unboundid-ldapsdk-3.2.0.jar.sha1 deleted file mode 100644 index 23697f364e9..00000000000 --- a/x-pack/plugin/core/licenses/unboundid-ldapsdk-3.2.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f76725e5a215ea468ecda06a8d66a809281e685f \ No newline at end of file diff --git a/x-pack/plugin/core/licenses/unboundid-ldapsdk-4.0.8.jar.sha1 b/x-pack/plugin/core/licenses/unboundid-ldapsdk-4.0.8.jar.sha1 new file mode 100644 index 00000000000..b235ed0cea8 --- /dev/null +++ b/x-pack/plugin/core/licenses/unboundid-ldapsdk-4.0.8.jar.sha1 @@ -0,0 +1 @@ +bf1a0d3790f8f7bd28f1172323c26fed2e3bbaa5 \ No newline at end of file diff --git 
a/x-pack/plugin/core/licenses/unboundid-ldapsdk-LICENSE.txt b/x-pack/plugin/core/licenses/unboundid-ldapsdk-LICENSE.txt index e57554e5692..5f5be0327d2 100644 --- a/x-pack/plugin/core/licenses/unboundid-ldapsdk-LICENSE.txt +++ b/x-pack/plugin/core/licenses/unboundid-ldapsdk-LICENSE.txt @@ -1,76 +1,77 @@ UnboundID LDAP SDK Free Use License -THIS IS AN AGREEMENT BETWEEN YOU ("YOU") AND UNBOUNDID CORP. ("UNBOUNDID") -REGARDING YOUR USE OF UNBOUNDID LDAP SDK FOR JAVA AND ANY ASSOCIATED -DOCUMENTATION, OBJECT CODE, COMPILED LIBRARIES, SOURCE CODE AND SOURCE FILES OR -OTHER MATERIALS MADE AVAILABLE BY UNBOUNDID (COLLECTIVELY REFERRED TO IN THIS -AGREEMENT AS THE ("SDK"). +THIS IS AN AGREEMENT BETWEEN YOU ("YOU") AND PING IDENTITY CORPORATION +("PING IDENTITY") REGARDING YOUR USE OF UNBOUNDID LDAP SDK FOR JAVA AND ANY +ASSOCIATED DOCUMENTATION, OBJECT CODE, COMPILED LIBRARIES, SOURCE CODE AND +SOURCE FILES OR OTHER MATERIALS MADE AVAILABLE BY PING IDENTITY (COLLECTIVELY +REFERRED TO IN THIS AGREEMENT AS THE ("SDK"). BY INSTALLING, ACCESSING OR OTHERWISE USING THE SDK, YOU ACCEPT THE TERMS OF THIS AGREEMENT. IF YOU DO NOT AGREE TO THE TERMS OF THIS AGREEMENT, DO NOT INSTALL, ACCESS OR USE THE SDK. -USE OF THE SDK. Subject to your compliance with this Agreement, UnboundID -grants to You a non-exclusive, royalty-free license, under UnboundID's +USE OF THE SDK. Subject to your compliance with this Agreement, Ping Identity +grants to You a non-exclusive, royalty-free license, under Ping Identity's intellectual property rights in the SDK, to use, reproduce, modify and distribute this release of the SDK; provided that no license is granted herein under any patents that may be infringed by your modifications, derivative works or by other works in which the SDK may be incorporated (collectively, your "Applications"). You may reproduce and redistribute the SDK with your Applications provided that you (i) include this license file and an -unmodified copy of the unboundid-ldapsdk-se.jar file; and (ii) such +unmodified copy of the unboundid-ldapsdk.jar file; and (ii) such redistribution is subject to a license whose terms do not conflict with or contradict the terms of this Agreement. You may also reproduce and redistribute the SDK without your Applications provided that you redistribute the SDK complete and unmodified (i.e., with all "read me" files, copyright notices, and -other legal notices and terms that UnboundID has included in the SDK). +other legal notices and terms that Ping Identity has included in the SDK). -SCOPE OF LICENSES. This Agreement does not grant You the right to use any -UnboundID intellectual property which is not included as part of the SDK. The +SCOPE OF LICENSES. This Agreement does not grant You the right to use any Ping +Identity intellectual property which is not included as part of the SDK. The SDK is licensed, not sold. This Agreement only gives You some rights to use -the SDK. UnboundID reserves all other rights. Unless applicable law gives You -more rights despite this limitation, You may use the SDK only as expressly +the SDK. Ping Identity reserves all other rights. Unless applicable law gives +You more rights despite this limitation, You may use the SDK only as expressly permitted in this Agreement. -SUPPORT. UnboundID is not obligated to provide any technical or other support -("Support Services") for the SDK to You under this Agreement. 
However, if -UnboundID chooses to provide any Support Services to You, Your use of such -Support Services will be governed by then-current UnboundID support policies. +SUPPORT. Ping Identity is not obligated to provide any technical or other +support ("Support Services") for the SDK to You under this Agreement. However, +if Ping Identity chooses to provide any Support Services to You, Your use of +such Support Services will be governed by then-current Ping Identity support +policies. -TERMINATION. UnboundID reserves the right to discontinue offering the SDK and -to modify the SDK at any time in its sole discretion. Notwithstanding anything -contained in this Agreement to the contrary, UnboundID may also, in its sole -discretion, terminate or suspend access to the SDK to You or any end user at -any time. In addition, if you fail to comply with the terms of this Agreement, -then any rights granted herein will be automatically terminated if such failure -is not corrected within 30 days of the initial notification of such failure. -You acknowledge that termination and/or monetary damages may not be a -sufficient remedy if You breach this Agreement and that UnboundID will be -entitled, without waiving any other rights or remedies, to injunctive or +TERMINATION. Ping Identity reserves the right to discontinue offering the SDK +and to modify the SDK at any time in its sole discretion. Notwithstanding +anything contained in this Agreement to the contrary, Ping Identity may also, +in its sole discretion, terminate or suspend access to the SDK to You or any +end user at any time. In addition, if you fail to comply with the terms of +this Agreement, then any rights granted herein will be automatically terminated +if such failure is not corrected within 30 days of the initial notification of +such failure. You acknowledge that termination and/or monetary damages may not +be a sufficient remedy if You breach this Agreement and that Ping Identity will +be entitled, without waiving any other rights or remedies, to injunctive or equitable relief as may be deemed proper by a court of competent jurisdiction -in the event of a breach. UnboundID may also terminate this Agreement if the -SDK becomes, or in UnboundID?s reasonable opinion is likely to become, the -subject of a claim of intellectual property infringement or trade secret +in the event of a breach. Ping Identity may also terminate this Agreement if +the SDK becomes, or in Ping Identity's reasonable opinion is likely to become, +the subject of a claim of intellectual property infringement or trade secret misappropriation. All rights and licenses granted herein will simultaneously and automatically terminate upon termination of this Agreement for any reason. -DISCLAIMER OF WARRANTY. THE SDK IS PROVIDED "AS IS" AND UNBOUNDID DOES NOT +DISCLAIMER OF WARRANTY. THE SDK IS PROVIDED "AS IS" AND PING IDENTITY DOES NOT WARRANT THAT THE SDK WILL BE ERROR-FREE, VIRUS-FREE, WILL PERFORM IN AN UNINTERRUPTED, SECURE OR TIMELY MANNER, OR WILL INTEROPERATE WITH OTHER HARDWARE, SOFTWARE, SYSTEMS OR DATA. TO THE MAXIMUM EXTENT ALLOWED BY LAW, ALL CONDITIONS, REPRESENTATIONS AND WARRANTIES, WHETHER EXPRESS, IMPLIED, STATUTORY OR OTHERWISE INCLUDING, WITHOUT LIMITATION, ANY IMPLIED WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE (EVEN IF UNBOUNDID HAD BEEN -INFORMED OF SUCH PURPOSE), OR NON-INFRINGEMENT OF THIRD PARTY RIGHTS ARE HEREBY -DISCLAIMED. 
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE (EVEN IF PING IDENTITY HAD +BEEN INFORMED OF SUCH PURPOSE), OR NON-INFRINGEMENT OF THIRD PARTY RIGHTS ARE +HEREBY DISCLAIMED. -LIMITATION OF LIABILITY. IN NO EVENT WILL UNBOUNDID OR ITS SUPPLIERS BE LIABLE -FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, +LIMITATION OF LIABILITY. IN NO EVENT WILL PING IDENTITY OR ITS SUPPLIERS BE +LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, REVENUE, DATA OR DATA USE, BUSINESS INTERRUPTION, COST OF COVER, DIRECT, INDIRECT, SPECIAL, PUNITIVE, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SDK OR IN ANY WAY RELATED TO -THIS AGREEMENT, EVEN IF UNBOUNDID HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -DAMAGES. +THIS AGREEMENT, EVEN IF PING IDENTITY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. ADDITIONAL RIGHTS. Certain states do not allow the exclusion of implied warranties or limitation of liability for certain kinds of damages, so the diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java index e61403e8b37..ee5d3a279a5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java @@ -35,7 +35,7 @@ import java.util.List; *

 * Optionally, each hop can contain a "guiding query" that further limits the set of documents considered.
 * In our weblog example above we might choose to constrain the second hop to only look at log records that
- * had a reponse code of 404.
+ * had a response code of 404.
 * <br>
 * <br>
* If absent, the list of {@link VertexRequest}s is inherited from the prior Hop's list to avoid repeating diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index ca76e71e052..6430513d979 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.SetOnce; import org.elasticsearch.SpecialPermission; import org.elasticsearch.Version; @@ -28,7 +29,6 @@ import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.inject.util.Providers; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -84,7 +84,7 @@ import java.util.stream.StreamSupport; public class XPackPlugin extends XPackClientPlugin implements ScriptPlugin, ExtensiblePlugin, RepositoryPlugin, EnginePlugin { - private static Logger logger = ESLoggerFactory.getLogger(XPackPlugin.class); + private static Logger logger = LogManager.getLogger(XPackPlugin.class); private static DeprecationLogger deprecationLogger = new DeprecationLogger(logger); public static final String XPACK_INSTALLED_NODE_ATTR = "xpack.installed"; @@ -102,6 +102,7 @@ public class XPackPlugin extends XPackClientPlugin implements ScriptPlugin, Exte public Void run() { try { Class.forName("com.unboundid.util.Debug"); + Class.forName("com.unboundid.ldap.sdk.LDAPConnectionOptions"); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } @@ -123,7 +124,7 @@ public class XPackPlugin extends XPackClientPlugin implements ScriptPlugin, Exte //private final Environment env; protected boolean transportClientMode; protected final Licensing licensing; - // These should not be directly accessed as they cannot be overriden in tests. Please use the getters so they can be overridden. + // These should not be directly accessed as they cannot be overridden in tests. Please use the getters so they can be overridden. 
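The comment kept in this hunk explains the wrapping of the statics: each holder is a SetOnce, so the value can be assigned exactly once, and tests override behavior through the getters rather than the fields. A tiny model of set-once semantics (org.apache.lucene.util.SetOnce is the real class here; this sketch is only an illustration):

```java
// Minimal set-once holder: the first assignment wins, later ones are rejected.
import java.util.concurrent.atomic.AtomicReference;

final class SetOnceDemo<T> {
    private final AtomicReference<T> ref = new AtomicReference<>();

    void set(T value) {
        if (!ref.compareAndSet(null, value)) { // only the first assignment succeeds
            throw new IllegalStateException("already set");
        }
    }

    T get() {
        return ref.get();
    }

    public static void main(String[] args) {
        SetOnceDemo<String> licenseState = new SetOnceDemo<>();
        licenseState.set("basic");
        System.out.println(licenseState.get()); // basic
        try {
            licenseState.set("trial");          // second set is rejected
        } catch (IllegalStateException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```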
private static final SetOnce licenseState = new SetOnce<>(); private static final SetOnce sslService = new SetOnce<>(); private static final SetOnce licenseService = new SetOnce<>(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java similarity index 84% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java index a69ecbf7cdf..dba5f3b6f1f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java @@ -30,13 +30,13 @@ import java.util.Map; import java.util.Objects; import java.util.TreeMap; -public class CcrStatsAction extends Action { +public class FollowStatsAction extends Action { public static final String NAME = "cluster:monitor/ccr/stats"; - public static final CcrStatsAction INSTANCE = new CcrStatsAction(); + public static final FollowStatsAction INSTANCE = new FollowStatsAction(); - private CcrStatsAction() { + private FollowStatsAction() { super(NAME); } @@ -76,15 +76,24 @@ public class CcrStatsAction extends Action { } builder.startObject(); { - for (final Map.Entry> index : taskResponsesByIndex.entrySet()) { - builder.startArray(index.getKey()); - { - for (final Map.Entry shard : index.getValue().entrySet()) { - shard.getValue().status().toXContent(builder, params); + builder.startArray("indices"); + { + for (final Map.Entry> index : taskResponsesByIndex.entrySet()) { + builder.startObject(); + { + builder.field("index", index.getKey()); + builder.startArray("shards"); + { + for (final Map.Entry shard : index.getValue().entrySet()) { + shard.getValue().status().toXContent(builder, params); + } + } + builder.endArray(); } + builder.endObject(); } - builder.endArray(); } + builder.endArray(); } builder.endObject(); return builder; @@ -140,8 +149,8 @@ public class CcrStatsAction extends Action { * This is a limitation of the current tasks API. When the transport action is executed, the tasks API invokes this match method * to find the tasks on which to execute the task-level operation (see TransportTasksAction#nodeOperation and * TransportTasksAction#processTasks). If we do the matching here, then we can not match index patterns. Therefore, we override - * TransportTasksAction#processTasks (see TransportCcrStatsAction#processTasks) and do the matching there. We should never see - * this method invoked and since we can not support matching a task on the basis of the request here, we throw that this + * TransportTasksAction#processTasks (see TransportFollowStatsAction#processTasks) and do the matching there. We should never + * see this method invoked and since we can not support matching a task on the basis of the request here, we throw that this * operation is unsupported. 
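The FollowStatsAction rework above also changes the response shape: instead of an object keyed by index name holding bare arrays, the body becomes an "indices" array whose entries carry an explicit "index" field plus a "shards" array. A hedged sketch of that nesting with XContentBuilder; the per-shard fields ("shard_id", "status") are invented here, since the real entries are written by each task's status object:

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    import java.io.IOException;
    import java.util.Map;
    import java.util.TreeMap;

    public class FollowStatsShapeSketch {
        public static void main(String[] args) throws IOException {
            // Invented per-index, per-shard statuses standing in for the task responses.
            Map<Integer, String> shards = new TreeMap<>();
            shards.put(0, "green");
            shards.put(1, "green");
            Map<String, Map<Integer, String>> byIndex = new TreeMap<>();
            byIndex.put("follower-1", shards);

            XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
            builder.startObject();
            builder.startArray("indices");
            for (Map.Entry<String, Map<Integer, String>> index : byIndex.entrySet()) {
                builder.startObject();
                builder.field("index", index.getKey());
                builder.startArray("shards");
                for (Map.Entry<Integer, String> shard : index.getValue().entrySet()) {
                    builder.startObject();
                    builder.field("shard_id", shard.getKey());
                    builder.field("status", shard.getValue());
                    builder.endObject();
                }
                builder.endArray();
                builder.endObject();
            }
            builder.endArray();
            builder.endObject();
            System.out.println(Strings.toString(builder));
        }
    }

Grouping each index into its own object makes the index name addressable as a value rather than a key, which is friendlier to consumers that map the JSON onto typed lists.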
*/ throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java index cf8c9ec2e61..9bb440cc089 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.core.ccr.action; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; @@ -20,7 +22,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; public class UnfollowAction extends Action { public static final UnfollowAction INSTANCE = new UnfollowAction(); - public static final String NAME = "cluster:admin/xpack/ccr/unfollow"; + public static final String NAME = "indices:admin/xpack/ccr/unfollow"; private UnfollowAction() { super(NAME); @@ -31,7 +33,7 @@ public class UnfollowAction extends Action { return new AcknowledgedResponse(); } - public static class Request extends AcknowledgedRequest { + public static class Request extends AcknowledgedRequest implements IndicesRequest { private final String followerIndex; @@ -48,6 +50,16 @@ public class UnfollowAction extends Action { return followerIndex; } + @Override + public String[] indices() { + return new String[] {followerIndex}; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException e = null; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java index 1dab97599df..3d5be565c1e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.xpack.core.ccr.action.AutoFollowStatsAction; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction; import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; @@ -54,14 +54,14 @@ public class CcrClient { } public void stats( - final CcrStatsAction.StatsRequest request, - final ActionListener listener) { - client.execute(CcrStatsAction.INSTANCE, request, listener); + final FollowStatsAction.StatsRequest request, + final ActionListener listener) { + client.execute(FollowStatsAction.INSTANCE, request, listener); } - public ActionFuture stats(final CcrStatsAction.StatsRequest request) { - final PlainActionFuture listener = 
PlainActionFuture.newFuture(); - client.execute(CcrStatsAction.INSTANCE, request, listener); + public ActionFuture stats(final FollowStatsAction.StatsRequest request) { + final PlainActionFuture listener = PlainActionFuture.newFuture(); + client.execute(FollowStatsAction.INSTANCE, request, listener); return listener; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java index 193695ac693..8d3c6a3565f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java @@ -91,9 +91,9 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { return groupOrJobLookup.expandJobIds(expression, allowNoJobs); } - public boolean isJobDeleted(String jobId) { + public boolean isJobDeleting(String jobId) { Job job = jobs.get(jobId); - return job == null || job.isDeleted(); + return job == null || job.isDeleting(); } public SortedMap getDatafeeds() { @@ -287,7 +287,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { if (job == null) { throw new ResourceNotFoundException("job [" + jobId + "] does not exist"); } - if (job.isDeleted() == false) { + if (job.isDeleting() == false) { throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] because it hasn't marked as deleted"); } return this; @@ -318,7 +318,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { private void checkJobIsAvailableForDatafeed(String jobId) { Job job = jobs.get(jobId); - if (job == null || job.isDeleted()) { + if (job == null || job.isDeleting()) { throw ExceptionsHelper.missingJobException(jobId); } Optional existingDatafeed = getDatafeedByJobId(jobId); @@ -387,14 +387,14 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { return new MlMetadata(jobs, datafeeds); } - public void markJobAsDeleted(String jobId, PersistentTasksCustomMetaData tasks, boolean allowDeleteOpenJob) { + public void markJobAsDeleting(String jobId, PersistentTasksCustomMetaData tasks, boolean allowDeleteOpenJob) { Job job = jobs.get(jobId); if (job == null) { throw ExceptionsHelper.missingJobException(jobId); } - if (job.isDeleted()) { + if (job.isDeleting()) { // Job still exists but is already being deleted - throw new JobAlreadyMarkedAsDeletedException(); + return; } checkJobHasNoDatafeed(jobId); @@ -408,7 +408,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { } } Job.Builder jobBuilder = new Job.Builder(job); - jobBuilder.setDeleted(true); + jobBuilder.setDeleting(true); putJob(jobBuilder.build(), true); } @@ -430,7 +430,4 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { } return mlMetadata; } - - public static class JobAlreadyMarkedAsDeletedException extends RuntimeException { - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index 9fbde4721cd..6b279e08521 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -42,6 +42,11 @@ public class DeleteJobAction extends Action { private String jobId; private boolean force; + /** + * Should this task store its result? 
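Related to the MlMetadata change above: markJobAsDeleting is now idempotent, so the JobAlreadyMarkedAsDeletedException class can be dropped and a repeated request simply returns. A toy sketch of that state-transition guard; JobState and the backing map are invented stand-ins for the real metadata:

    import java.util.HashMap;
    import java.util.Map;

    public class DeletingGuardSketch {

        enum JobState { PRESENT, DELETING }

        private final Map<String, JobState> jobs = new HashMap<>();

        public void markJobAsDeleting(String jobId) {
            JobState state = jobs.get(jobId);
            if (state == null) {
                throw new IllegalArgumentException("job [" + jobId + "] is missing");
            }
            if (state == JobState.DELETING) {
                return; // already being deleted; a repeat request is harmless
            }
            jobs.put(jobId, JobState.DELETING);
        }
    }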
+ */ + private boolean shouldStoreResult; + public Request(String jobId) { this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); } @@ -64,6 +69,18 @@ public class DeleteJobAction extends Action { this.force = force; } + /** + * Should this task store its result after it has finished? + */ + public void setShouldStoreResult(boolean shouldStoreResult) { + this.shouldStoreResult = shouldStoreResult; + } + + @Override + public boolean getShouldStoreResult() { + return shouldStoreResult; + } + @Override public ActionRequestValidationException validate() { return null; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java index 1ac9f081ebe..f381d5296a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; @@ -103,6 +104,7 @@ public class FileStructure implements ToXContentObject, Writeable { public static final ParseField JAVA_TIMESTAMP_FORMATS = new ParseField("java_timestamp_formats"); public static final ParseField NEED_CLIENT_TIMEZONE = new ParseField("need_client_timezone"); public static final ParseField MAPPINGS = new ParseField("mappings"); + public static final ParseField INGEST_PIPELINE = new ParseField("ingest_pipeline"); public static final ParseField FIELD_STATS = new ParseField("field_stats"); public static final ParseField EXPLANATION = new ParseField("explanation"); @@ -128,6 +130,7 @@ public class FileStructure implements ToXContentObject, Writeable { PARSER.declareStringArray(Builder::setJavaTimestampFormats, JAVA_TIMESTAMP_FORMATS); PARSER.declareBoolean(Builder::setNeedClientTimezone, NEED_CLIENT_TIMEZONE); PARSER.declareObject(Builder::setMappings, (p, c) -> new TreeMap<>(p.map()), MAPPINGS); + PARSER.declareObject(Builder::setIngestPipeline, (p, c) -> p.mapOrdered(), INGEST_PIPELINE); PARSER.declareObject(Builder::setFieldStats, (p, c) -> { Map fieldStats = new TreeMap<>(); while (p.nextToken() == XContentParser.Token.FIELD_NAME) { @@ -157,6 +160,7 @@ public class FileStructure implements ToXContentObject, Writeable { private final String timestampField; private final boolean needClientTimezone; private final SortedMap mappings; + private final Map ingestPipeline; private final SortedMap fieldStats; private final List explanation; @@ -164,8 +168,8 @@ public class FileStructure implements ToXContentObject, Writeable { Format format, String multilineStartPattern, String excludeLinesPattern, List columnNames, Boolean hasHeaderRow, Character delimiter, Character quote, Boolean shouldTrimFields, String grokPattern, String timestampField, List jodaTimestampFormats, List javaTimestampFormats, - boolean needClientTimezone, Map mappings, Map fieldStats, - List explanation) { + boolean needClientTimezone, Map mappings, Map ingestPipeline, + Map fieldStats, List explanation) { this.numLinesAnalyzed = numLinesAnalyzed; this.numMessagesAnalyzed = numMessagesAnalyzed; @@ -188,6 +192,7 @@ public class FileStructure implements ToXContentObject, Writeable { 
(javaTimestampFormats == null) ? null : Collections.unmodifiableList(new ArrayList<>(javaTimestampFormats)); this.needClientTimezone = needClientTimezone; this.mappings = Collections.unmodifiableSortedMap(new TreeMap<>(mappings)); + this.ingestPipeline = (ingestPipeline == null) ? null : Collections.unmodifiableMap(new LinkedHashMap<>(ingestPipeline)); this.fieldStats = Collections.unmodifiableSortedMap(new TreeMap<>(fieldStats)); this.explanation = Collections.unmodifiableList(new ArrayList<>(explanation)); } @@ -212,6 +217,7 @@ public class FileStructure implements ToXContentObject, Writeable { timestampField = in.readOptionalString(); needClientTimezone = in.readBoolean(); mappings = Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap())); + ingestPipeline = in.readBoolean() ? Collections.unmodifiableMap(in.readMap()) : null; fieldStats = Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap(StreamInput::readString, FieldStats::new))); explanation = Collections.unmodifiableList(in.readList(StreamInput::readString)); } @@ -262,6 +268,12 @@ public class FileStructure implements ToXContentObject, Writeable { out.writeOptionalString(timestampField); out.writeBoolean(needClientTimezone); out.writeMap(mappings); + if (ingestPipeline == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeMap(ingestPipeline); + } out.writeMap(fieldStats, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); out.writeCollection(explanation, StreamOutput::writeString); } @@ -342,6 +354,10 @@ public class FileStructure implements ToXContentObject, Writeable { return mappings; } + public Map getIngestPipeline() { + return ingestPipeline; + } + public SortedMap getFieldStats() { return fieldStats; } @@ -397,6 +413,9 @@ public class FileStructure implements ToXContentObject, Writeable { } builder.field(NEED_CLIENT_TIMEZONE.getPreferredName(), needClientTimezone); builder.field(MAPPINGS.getPreferredName(), mappings); + if (ingestPipeline != null) { + builder.field(INGEST_PIPELINE.getPreferredName(), ingestPipeline); + } if (fieldStats.isEmpty() == false) { builder.startObject(FIELD_STATS.getPreferredName()); for (Map.Entry entry : fieldStats.entrySet()) { @@ -476,6 +495,7 @@ public class FileStructure implements ToXContentObject, Writeable { private List javaTimestampFormats; private boolean needClientTimezone; private Map mappings; + private Map ingestPipeline; private Map fieldStats = Collections.emptyMap(); private List explanation; @@ -582,6 +602,11 @@ public class FileStructure implements ToXContentObject, Writeable { return this; } + public Builder setIngestPipeline(Map ingestPipeline) { + this.ingestPipeline = ingestPipeline; + return this; + } + public Builder setFieldStats(Map fieldStats) { this.fieldStats = Objects.requireNonNull(fieldStats); return this; @@ -708,7 +733,8 @@ public class FileStructure implements ToXContentObject, Writeable { return new FileStructure(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format, multilineStartPattern, excludeLinesPattern, columnNames, hasHeaderRow, delimiter, quote, shouldTrimFields, grokPattern, - timestampField, jodaTimestampFormats, javaTimestampFormats, needClientTimezone, mappings, fieldStats, explanation); + timestampField, jodaTimestampFormats, javaTimestampFormats, needClientTimezone, mappings, ingestPipeline, fieldStats, + explanation); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java index b5083aeecb9..b6275c6e057 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java @@ -385,7 +385,7 @@ public class Detector implements ToXContentObject, Writeable { } /** - * Excludes frequently-occuring metrics from the analysis; + * Excludes frequently-occurring metrics from the analysis; * can apply to 'by' field, 'over' field, or both * * @return the value that the user set diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index a978612fd02..5a352ab2665 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; @@ -67,7 +66,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO public static final ParseField DATA_DESCRIPTION = new ParseField("data_description"); public static final ParseField DESCRIPTION = new ParseField("description"); public static final ParseField FINISHED_TIME = new ParseField("finished_time"); - public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time"); public static final ParseField ESTABLISHED_MODEL_MEMORY = new ParseField("established_model_memory"); public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config"); public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days"); @@ -77,7 +75,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); public static final ParseField MODEL_SNAPSHOT_MIN_VERSION = new ParseField("model_snapshot_min_version"); public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name"); - public static final ParseField DELETED = new ParseField("deleted"); + public static final ParseField DELETING = new ParseField("deleting"); // Used for QueryPage public static final ParseField RESULTS_FIELD = new ParseField("jobs"); @@ -99,33 +97,10 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO parser.declareString(Builder::setJobVersion, JOB_VERSION); parser.declareStringArray(Builder::setGroups, GROUPS); parser.declareStringOrNull(Builder::setDescription, DESCRIPTION); - parser.declareField(Builder::setCreateTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + - "] for [" + CREATE_TIME.getPreferredName() 
+ "]"); - }, CREATE_TIME, ValueType.VALUE); - parser.declareField(Builder::setFinishedTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + FINISHED_TIME.getPreferredName() + "]"); - }, FINISHED_TIME, ValueType.VALUE); - parser.declareField(Builder::setLastDataTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LAST_DATA_TIME.getPreferredName() + "]"); - }, LAST_DATA_TIME, ValueType.VALUE); + parser.declareField(Builder::setCreateTime, + p -> TimeUtils.parseTimeField(p, CREATE_TIME.getPreferredName()), CREATE_TIME, ValueType.VALUE); + parser.declareField(Builder::setFinishedTime, + p -> TimeUtils.parseTimeField(p, FINISHED_TIME.getPreferredName()), FINISHED_TIME, ValueType.VALUE); parser.declareLong(Builder::setEstablishedModelMemory, ESTABLISHED_MODEL_MEMORY); parser.declareObject(Builder::setAnalysisConfig, ignoreUnknownFields ? AnalysisConfig.LENIENT_PARSER : AnalysisConfig.STRICT_PARSER, ANALYSIS_CONFIG); @@ -144,7 +119,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); parser.declareStringOrNull(Builder::setModelSnapshotMinVersion, MODEL_SNAPSHOT_MIN_VERSION); parser.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - parser.declareBoolean(Builder::setDeleted, DELETED); + parser.declareBoolean(Builder::setDeleting, DELETING); return parser; } @@ -164,7 +139,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO // TODO: Use java.time for the Dates here: x-pack-elasticsearch#829 private final Date createTime; private final Date finishedTime; - private final Date lastDataTime; private final Long establishedModelMemory; private final AnalysisConfig analysisConfig; private final AnalysisLimits analysisLimits; @@ -178,14 +152,14 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO private final String modelSnapshotId; private final Version modelSnapshotMinVersion; private final String resultsIndexName; - private final boolean deleted; + private final boolean deleting; - private Job(String jobId, String jobType, Version jobVersion, List groups, String description, Date createTime, - Date finishedTime, Date lastDataTime, Long establishedModelMemory, + private Job(String jobId, String jobType, Version jobVersion, List groups, String description, + Date createTime, Date finishedTime, Long establishedModelMemory, AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription, ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval, Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map customSettings, - String modelSnapshotId, Version modelSnapshotMinVersion, String resultsIndexName, boolean deleted) { + String modelSnapshotId, Version modelSnapshotMinVersion, String resultsIndexName, boolean deleting) { this.jobId = jobId; this.jobType = jobType; @@ -194,7 +168,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO 
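The Job.java parser above, and the DataCounts, ModelSizeStats, ModelSnapshot, AnomalyRecord, Bucket, BucketInfluencer, Forecast and Influencer hunks later in this patch, all swap a copy-pasted date-parsing lambda for a shared TimeUtils.parseTimeField helper. The helper's body is not shown in any hunk; reconstructed from the removed lambdas it plausibly looks like the sketch below, with dateStringToEpoch stubbed out since the real implementation is stricter:

    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.XContentParser.Token;

    import java.io.IOException;
    import java.util.Date;

    public final class TimeParsingSketch {

        public static Date parseTimeField(XContentParser p, String fieldName) throws IOException {
            if (p.currentToken() == Token.VALUE_NUMBER) {
                return new Date(p.longValue());               // epoch milliseconds
            } else if (p.currentToken() == Token.VALUE_STRING) {
                return new Date(dateStringToEpoch(p.text())); // date string
            }
            throw new IllegalArgumentException(
                "unexpected token [" + p.currentToken() + "] for [" + fieldName + "]");
        }

        // Stand-in for TimeUtils.dateStringToEpoch; illustration only.
        private static long dateStringToEpoch(String date) {
            return java.time.Instant.parse(date).toEpochMilli();
        }
    }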
this.description = description; this.createTime = createTime; this.finishedTime = finishedTime; - this.lastDataTime = lastDataTime; this.establishedModelMemory = establishedModelMemory; this.analysisConfig = analysisConfig; this.analysisLimits = analysisLimits; @@ -208,7 +181,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO this.modelSnapshotId = modelSnapshotId; this.modelSnapshotMinVersion = modelSnapshotMinVersion; this.resultsIndexName = resultsIndexName; - this.deleted = deleted; + this.deleting = deleting; } public Job(StreamInput in) throws IOException { @@ -223,7 +196,12 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO description = in.readOptionalString(); createTime = new Date(in.readVLong()); finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null; - lastDataTime = in.readBoolean() ? new Date(in.readVLong()) : null; + // for removed last_data_time field + if (in.getVersion().before(Version.V_7_0_0_alpha1)) { + if (in.readBoolean()) { + in.readVLong(); + } + } if (in.getVersion().onOrAfter(Version.V_6_1_0)) { establishedModelMemory = in.readOptionalLong(); } else { @@ -246,7 +224,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO modelSnapshotMinVersion = null; } resultsIndexName = in.readString(); - deleted = in.readBoolean(); + deleting = in.readBoolean(); } /** @@ -316,16 +294,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO return finishedTime; } - /** - * The last time data was uploaded to the job or null if no - * data has been seen. - * - * @return The date at which the last data was processed - */ - public Date getLastDataTime() { - return lastDataTime; - } - /** * The established model memory of the job, or null if model * memory has not reached equilibrium yet. 
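The stream constructor above also shows the wire-compatibility idiom for a removed field: when the peer is older than 7.0.0 the reader still consumes the last_data_time presence flag (and its value, if present), and the matching writeTo still emits "absent"; newer peers never see the field at all. The optional ingest_pipeline map in the FileStructure hunk uses the same presence-flag encoding. A self-contained sketch with invented wire interfaces and version constants:

    // Sketch of removed-field compatibility: skip on read, write "absent".
    final class RemovedFieldBwcSketch {

        static final int V_7_0_0 = 70000; // illustrative version id

        static void read(WireIn in, int peerVersion) {
            if (peerVersion < V_7_0_0) {
                if (in.readBoolean()) { // old peers may still send the field
                    in.readVLong();     // consume and discard its value
                }
            }
        }

        static void write(WireOut out, int peerVersion) {
            if (peerVersion < V_7_0_0) {
                out.writeBoolean(false); // tell old peers the field is absent
            }
        }

        interface WireIn { boolean readBoolean(); long readVLong(); }
        interface WireOut { void writeBoolean(boolean b); }
    }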
@@ -407,8 +375,8 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO return modelSnapshotMinVersion; } - public boolean isDeleted() { - return deleted; + public boolean isDeleting() { + return deleting; } /** @@ -495,10 +463,8 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO } else { out.writeBoolean(false); } - if (lastDataTime != null) { - out.writeBoolean(true); - out.writeVLong(lastDataTime.getTime()); - } else { + // for removed last_data_time field + if (out.getVersion().before(Version.V_7_0_0_alpha1)) { out.writeBoolean(false); } if (out.getVersion().onOrAfter(Version.V_6_1_0)) { @@ -523,7 +489,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO } } out.writeString(resultsIndexName); - out.writeBoolean(deleted); + out.writeBoolean(deleting); } @Override @@ -553,10 +519,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO builder.timeField(FINISHED_TIME.getPreferredName(), FINISHED_TIME.getPreferredName() + humanReadableSuffix, finishedTime.getTime()); } - if (lastDataTime != null) { - builder.timeField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + humanReadableSuffix, - lastDataTime.getTime()); - } if (establishedModelMemory != null) { builder.field(ESTABLISHED_MODEL_MEMORY.getPreferredName(), establishedModelMemory); } @@ -592,8 +554,8 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO builder.field(MODEL_SNAPSHOT_MIN_VERSION.getPreferredName(), modelSnapshotMinVersion); } builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); - if (params.paramAsBoolean("all", false)) { - builder.field(DELETED.getPreferredName(), deleted); + if (deleting) { + builder.field(DELETING.getPreferredName(), deleting); } return builder; } @@ -616,7 +578,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO && Objects.equals(this.description, that.description) && Objects.equals(this.createTime, that.createTime) && Objects.equals(this.finishedTime, that.finishedTime) - && Objects.equals(this.lastDataTime, that.lastDataTime) && Objects.equals(this.establishedModelMemory, that.establishedModelMemory) && Objects.equals(this.analysisConfig, that.analysisConfig) && Objects.equals(this.analysisLimits, that.analysisLimits) @@ -630,15 +591,15 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) && Objects.equals(this.modelSnapshotMinVersion, that.modelSnapshotMinVersion) && Objects.equals(this.resultsIndexName, that.resultsIndexName) - && Objects.equals(this.deleted, that.deleted); + && Objects.equals(this.deleting, that.deleting); } @Override public int hashCode() { - return Objects.hash(jobId, jobType, jobVersion, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory, + return Objects.hash(jobId, jobType, jobVersion, groups, description, createTime, finishedTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, - modelSnapshotId, modelSnapshotMinVersion, resultsIndexName, deleted); + modelSnapshotId, modelSnapshotMinVersion, resultsIndexName, deleting); } // Class already extends from AbstractDiffable, so copied from ToXContentToBytes#toString() @@ -676,7 +637,6 @@ public class Job extends AbstractDiffable implements Writeable, 
ToXContentO private DataDescription dataDescription; private Date createTime; private Date finishedTime; - private Date lastDataTime; private Long establishedModelMemory; private ModelPlotConfig modelPlotConfig; private Long renormalizationWindowDays; @@ -687,7 +647,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO private String modelSnapshotId; private Version modelSnapshotMinVersion; private String resultsIndexName; - private boolean deleted; + private boolean deleting; public Builder() { } @@ -707,7 +667,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO this.dataDescription = job.getDataDescription(); this.createTime = job.getCreateTime(); this.finishedTime = job.getFinishedTime(); - this.lastDataTime = job.getLastDataTime(); this.establishedModelMemory = job.getEstablishedModelMemory(); this.modelPlotConfig = job.getModelPlotConfig(); this.renormalizationWindowDays = job.getRenormalizationWindowDays(); @@ -718,7 +677,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO this.modelSnapshotId = job.getModelSnapshotId(); this.modelSnapshotMinVersion = job.getModelSnapshotMinVersion(); this.resultsIndexName = job.getResultsIndexNameNoPrefix(); - this.deleted = job.isDeleted(); + this.deleting = job.isDeleting(); } public Builder(StreamInput in) throws IOException { @@ -733,7 +692,12 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO description = in.readOptionalString(); createTime = in.readBoolean() ? new Date(in.readVLong()) : null; finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null; - lastDataTime = in.readBoolean() ? new Date(in.readVLong()) : null; + // for removed last_data_time field + if (in.getVersion().before(Version.V_7_0_0_alpha1)) { + if (in.readBoolean()) { + in.readVLong(); + } + } if (in.getVersion().onOrAfter(Version.V_6_1_0)) { establishedModelMemory = in.readOptionalLong(); } @@ -753,7 +717,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO modelSnapshotMinVersion = null; } resultsIndexName = in.readOptionalString(); - deleted = in.readBoolean(); + deleting = in.readBoolean(); } public Builder setId(String id) { @@ -815,15 +779,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO return this; } - /** - * Set the wall clock time of the last data upload - * @param lastDataTime Wall clock time - */ - public Builder setLastDataTime(Date lastDataTime) { - this.lastDataTime = lastDataTime; - return this; - } - public Builder setEstablishedModelMemory(Long establishedModelMemory) { this.establishedModelMemory = establishedModelMemory; return this; @@ -879,8 +834,8 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO return this; } - public Builder setDeleted(boolean deleted) { - this.deleted = deleted; + public Builder setDeleting(boolean deleting) { + this.deleting = deleting; return this; } @@ -895,9 +850,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO if (modelSnapshotId != null) { invalidCreateValues.add(MODEL_SNAPSHOT_ID.getPreferredName()); } - if (lastDataTime != null) { - invalidCreateValues.add(LAST_DATA_TIME.getPreferredName()); - } if (finishedTime != null) { invalidCreateValues.add(FINISHED_TIME.getPreferredName()); } @@ -933,10 +885,8 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO } else { out.writeBoolean(false); } - if (lastDataTime != null) { - out.writeBoolean(true); - 
out.writeVLong(lastDataTime.getTime()); - } else { + // for removed last_data_time field + if (out.getVersion().before(Version.V_7_0_0_alpha1)) { out.writeBoolean(false); } if (out.getVersion().onOrAfter(Version.V_6_1_0)) { @@ -961,7 +911,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO } } out.writeOptionalString(resultsIndexName); - out.writeBoolean(deleted); + out.writeBoolean(deleting); } @Override @@ -983,9 +933,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO if (finishedTime != null) { builder.field(FINISHED_TIME.getPreferredName(), finishedTime.getTime()); } - if (lastDataTime != null) { - builder.field(LAST_DATA_TIME.getPreferredName(), lastDataTime.getTime()); - } if (establishedModelMemory != null) { builder.field(ESTABLISHED_MODEL_MEMORY.getPreferredName(), establishedModelMemory); } @@ -1025,8 +972,8 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO if (resultsIndexName != null) { builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); } - if (params.paramAsBoolean("all", false)) { - builder.field(DELETED.getPreferredName(), deleted); + if (deleting) { + builder.field(DELETING.getPreferredName(), deleting); } builder.endObject(); @@ -1049,7 +996,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO && Objects.equals(this.dataDescription, that.dataDescription) && Objects.equals(this.createTime, that.createTime) && Objects.equals(this.finishedTime, that.finishedTime) - && Objects.equals(this.lastDataTime, that.lastDataTime) && Objects.equals(this.establishedModelMemory, that.establishedModelMemory) && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays) @@ -1060,15 +1006,15 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) && Objects.equals(this.modelSnapshotMinVersion, that.modelSnapshotMinVersion) && Objects.equals(this.resultsIndexName, that.resultsIndexName) - && Objects.equals(this.deleted, that.deleted); + && Objects.equals(this.deleting, that.deleting); } @Override public int hashCode() { - return Objects.hash(id, jobType, jobVersion, groups, description, analysisConfig, analysisLimits, dataDescription, createTime, - finishedTime, lastDataTime, establishedModelMemory, modelPlotConfig, renormalizationWindowDays, + return Objects.hash(id, jobType, jobVersion, groups, description, analysisConfig, analysisLimits, dataDescription, + createTime, finishedTime, establishedModelMemory, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, modelSnapshotId, - modelSnapshotMinVersion, resultsIndexName, deleted); + modelSnapshotMinVersion, resultsIndexName, deleting); } /** @@ -1178,10 +1124,10 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO } return new Job( - id, jobType, jobVersion, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory, + id, jobType, jobVersion, groups, description, createTime, finishedTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, - modelSnapshotId, modelSnapshotMinVersion, resultsIndexName, deleted); + modelSnapshotId, modelSnapshotMinVersion, 
resultsIndexName, deleting); } private void checkValidBackgroundPersistInterval() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 3c571c9d605..b669e8f1edc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -67,6 +67,8 @@ public final class Messages { public static final String JOB_AUDIT_DATAFEED_STARTED_FROM_TO = "Datafeed started (from: {0} to: {1}) with frequency [{2}]"; public static final String JOB_AUDIT_DATAFEED_STARTED_REALTIME = "Datafeed started in real-time"; public static final String JOB_AUDIT_DATAFEED_STOPPED = "Datafeed stopped"; + public static final String JOB_AUDIT_DELETING = "Deleting job by task with id ''{0}''"; + public static final String JOB_AUDIT_DELETING_FAILED = "Error deleting job: {0}"; public static final String JOB_AUDIT_DELETED = "Job deleted"; public static final String JOB_AUDIT_KILLING = "Killing job"; public static final String JOB_AUDIT_OLD_RESULTS_DELETED = "Deleted results prior to {1}"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index 316417f4b23..d95b404ed35 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -372,6 +372,9 @@ public class ElasticsearchMappings { .startObject(AnomalyRecord.PROBABILITY.getPreferredName()) .field(TYPE, DOUBLE) .endObject() + .startObject(AnomalyRecord.MULTI_BUCKET_IMPACT.getPreferredName()) + .field(TYPE, DOUBLE) + .endObject() .startObject(AnomalyRecord.FUNCTION.getPreferredName()) .field(TYPE, KEYWORD) .endObject() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDeletionTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDeletionTask.java index 2f218cfb2dc..f3cd2abf461 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDeletionTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDeletionTask.java @@ -12,7 +12,17 @@ import java.util.Map; public class JobDeletionTask extends Task { + private volatile boolean started; + public JobDeletionTask(long id, String type, String action, String description, TaskId parentTask, Map headers) { super(id, type, action, description, parentTask, headers); } + + public void start() { + started = true; + } + + public boolean isStarted() { + return started; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java index 2d9afa833c3..ff47cfe1ca8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java @@ -13,7 +13,6 @@ import 
org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; import java.io.IOException; @@ -36,15 +35,9 @@ public class FlushAcknowledgement implements ToXContentObject, Writeable { static { PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LAST_FINALIZED_BUCKET_END.getPreferredName() + "]"); - }, LAST_FINALIZED_BUCKET_END, ObjectParser.ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + p -> TimeUtils.parseTimeField(p, LAST_FINALIZED_BUCKET_END.getPreferredName()), + LAST_FINALIZED_BUCKET_END, ObjectParser.ValueType.VALUE); } private String id; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java index f2545c5abf7..b13e702e85d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -90,51 +89,16 @@ public class DataCounts implements ToXContentObject, Writeable { PARSER.declareLong(ConstructingObjectParser.constructorArg(), EMPTY_BUCKET_COUNT); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SPARSE_BUCKET_COUNT); PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_COUNT); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + EARLIEST_RECORD_TIME.getPreferredName() + "]"); - }, EARLIEST_RECORD_TIME, ValueType.VALUE); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LATEST_RECORD_TIME.getPreferredName() + "]"); - }, LATEST_RECORD_TIME, ValueType.VALUE); - 
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LAST_DATA_TIME.getPreferredName() + "]"); - }, LAST_DATA_TIME, ValueType.VALUE); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LATEST_EMPTY_BUCKET_TIME.getPreferredName() + "]"); - }, LATEST_EMPTY_BUCKET_TIME, ValueType.VALUE); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LATEST_SPARSE_BUCKET_TIME.getPreferredName() + "]"); - }, LATEST_SPARSE_BUCKET_TIME, ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + p -> TimeUtils.parseTimeField(p, EARLIEST_RECORD_TIME.getPreferredName()), EARLIEST_RECORD_TIME, ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + p -> TimeUtils.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), LATEST_RECORD_TIME, ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + p -> TimeUtils.parseTimeField(p, LAST_DATA_TIME.getPreferredName()), LAST_DATA_TIME, ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + p -> TimeUtils.parseTimeField(p, LATEST_EMPTY_BUCKET_TIME.getPreferredName()), LATEST_EMPTY_BUCKET_TIME, ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + p -> TimeUtils.parseTimeField(p, LATEST_SPARSE_BUCKET_TIME.getPreferredName()), LATEST_SPARSE_BUCKET_TIME, ValueType.VALUE); PARSER.declareLong((t, u) -> {;}, INPUT_RECORD_COUNT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java index 58c0a567ada..f5d7cebbc4c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.results.Result; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -22,6 +21,7 @@ import java.io.IOException; import java.util.Date; import java.util.Locale; import 
java.util.Objects; +import java.util.function.BiConsumer; /** * Provide access to the C++ model memory usage numbers for the Java process. @@ -60,25 +60,12 @@ public class ModelSizeStats implements ToXContentObject, Writeable { parser.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD); parser.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD); parser.declareLong(Builder::setTotalPartitionFieldCount, TOTAL_PARTITION_FIELD_COUNT_FIELD); - parser.declareField(Builder::setLogTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LOG_TIME_FIELD.getPreferredName() + "]"); - }, LOG_TIME_FIELD, ValueType.VALUE); - parser.declareField(Builder::setTimestamp, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + TIMESTAMP_FIELD.getPreferredName() + "]"); - }, TIMESTAMP_FIELD, ValueType.VALUE); - parser.declareField(Builder::setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING); + parser.declareField(Builder::setLogTime, + p -> TimeUtils.parseTimeField(p, LOG_TIME_FIELD.getPreferredName()), LOG_TIME_FIELD, ValueType.VALUE); + parser.declareField(Builder::setTimestamp, + p -> TimeUtils.parseTimeField(p, TIMESTAMP_FIELD.getPreferredName()), TIMESTAMP_FIELD, ValueType.VALUE); + BiConsumer setMemoryStatus = Builder::setMemoryStatus; + parser.declareField(setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING); return parser; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java index 068b998dc25..ad5b9d780e6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -64,38 +63,17 @@ public class ModelSnapshot implements ToXContentObject, Writeable { parser.declareString(Builder::setJobId, Job.ID); parser.declareString(Builder::setMinVersion, MIN_VERSION); - parser.declareField(Builder::setTimestamp, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + TIMESTAMP.getPreferredName() + "]"); - }, TIMESTAMP, 
ValueType.VALUE); + parser.declareField(Builder::setTimestamp, + p -> TimeUtils.parseTimeField(p, TIMESTAMP.getPreferredName()), TIMESTAMP, ValueType.VALUE); parser.declareString(Builder::setDescription, DESCRIPTION); parser.declareString(Builder::setSnapshotId, ModelSnapshotField.SNAPSHOT_ID); parser.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT); parser.declareObject(Builder::setModelSizeStats, ignoreUnknownFields ? ModelSizeStats.LENIENT_PARSER : ModelSizeStats.STRICT_PARSER, ModelSizeStats.RESULT_TYPE_FIELD); - parser.declareField(Builder::setLatestRecordTimeStamp, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LATEST_RECORD_TIME.getPreferredName() + "]"); - }, LATEST_RECORD_TIME, ValueType.VALUE); - parser.declareField(Builder::setLatestResultTimeStamp, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LATEST_RESULT_TIME.getPreferredName() + "]"); - }, LATEST_RESULT_TIME, ValueType.VALUE); + parser.declareField(Builder::setLatestRecordTimeStamp, + p -> TimeUtils.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), LATEST_RECORD_TIME, ValueType.VALUE); + parser.declareField(Builder::setLatestResultTimeStamp, + p -> TimeUtils.parseTimeField(p, LATEST_RESULT_TIME.getPreferredName()), LATEST_RESULT_TIME, ValueType.VALUE); parser.declareObject(Builder::setQuantiles, ignoreUnknownFields ? 
Quantiles.LENIENT_PARSER : Quantiles.STRICT_PARSER, QUANTILES); parser.declareBoolean(Builder::setRetain, RETAIN); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java index 869cdcb437e..3c099e30924 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java @@ -14,11 +14,11 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.Version; import java.io.IOException; import java.util.ArrayList; @@ -44,6 +44,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable { * Result fields (all detector types) */ public static final ParseField PROBABILITY = new ParseField("probability"); + public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact"); public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name"); public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value"); public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value"); @@ -88,18 +89,12 @@ public class AnomalyRecord implements ToXContentObject, Writeable { a -> new AnomalyRecord((String) a[0], (Date) a[1], (long) a[2])); parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - parser.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + parser.declareField(ConstructingObjectParser.constructorArg(), + p -> TimeUtils.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), Result.TIMESTAMP, ValueType.VALUE); parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); parser.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE); parser.declareDouble(AnomalyRecord::setProbability, PROBABILITY); + parser.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT); parser.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE); parser.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE); parser.declareInt(AnomalyRecord::setDetectorIndex, Detector.DETECTOR_INDEX); @@ -127,6 +122,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable { private final String jobId; private int detectorIndex; private double probability; + private Double multiBucketImpact; private String byFieldName; private String byFieldValue; private String correlatedByFieldValue; @@ -164,6 +160,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable { jobId 
= in.readString(); detectorIndex = in.readInt(); probability = in.readDouble(); + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { + multiBucketImpact = in.readOptionalDouble(); + } byFieldName = in.readOptionalString(); byFieldValue = in.readOptionalString(); correlatedByFieldValue = in.readOptionalString(); @@ -198,6 +197,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable { out.writeString(jobId); out.writeInt(detectorIndex); out.writeDouble(probability); + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { + out.writeOptionalDouble(multiBucketImpact); + } out.writeOptionalString(byFieldName); out.writeOptionalString(byFieldValue); out.writeOptionalString(correlatedByFieldValue); @@ -247,6 +249,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable { builder.field(Job.ID.getPreferredName(), jobId); builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); builder.field(PROBABILITY.getPreferredName(), probability); + if (multiBucketImpact != null) { + builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact); + } builder.field(RECORD_SCORE.getPreferredName(), recordScore); builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore); builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); @@ -389,6 +394,14 @@ public class AnomalyRecord implements ToXContentObject, Writeable { probability = value; } + public double getMultiBucketImpact() { + return multiBucketImpact; + } + + public void setMultiBucketImpact(double value) { + multiBucketImpact = value; + } + public String getByFieldName() { return byFieldName; } @@ -519,7 +532,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable { @Override public int hashCode() { - return Objects.hash(jobId, detectorIndex, bucketSpan, probability, recordScore, + return Objects.hash(jobId, detectorIndex, bucketSpan, probability, multiBucketImpact, recordScore, initialRecordScore, typical, actual,function, functionDescription, fieldName, byFieldName, byFieldValue, correlatedByFieldValue, partitionFieldName, partitionFieldValue, overFieldName, overFieldValue, timestamp, isInterim, @@ -543,6 +556,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable { && this.detectorIndex == that.detectorIndex && this.bucketSpan == that.bucketSpan && this.probability == that.probability + && Objects.equals(this.multiBucketImpact, that.multiBucketImpact) && this.recordScore == that.recordScore && this.initialRecordScore == that.initialRecordScore && Objects.deepEquals(this.typical, that.typical) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java index 8280ee9f22e..793968802c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -74,15 +73,8 @@ public class Bucket implements 
ToXContentObject, Writeable { a -> new Bucket((String) a[0], (Date) a[1], (long) a[2])); parser.declareString(ConstructingObjectParser.constructorArg(), JOB_ID); - parser.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + parser.declareField(ConstructingObjectParser.constructorArg(), + p -> TimeUtils.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), Result.TIMESTAMP, ValueType.VALUE); parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); parser.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE); parser.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java index 38d76789a2e..e6031b3b8df 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -52,15 +51,8 @@ public class BucketInfluencer implements ToXContentObject, Writeable { ignoreUnknownFields, a -> new BucketInfluencer((String) a[0], (Date) a[1], (long) a[2])); parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - parser.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + parser.declareField(ConstructingObjectParser.constructorArg(), + p -> TimeUtils.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), Result.TIMESTAMP, ValueType.VALUE); parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); parser.declareString((bucketInfluencer, s) -> {}, Result.RESULT_TYPE); parser.declareString(BucketInfluencer::setInfluencerFieldName, INFLUENCER_FIELD_NAME); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java index 47f6769a07f..03a9b801167 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java @@ -13,7 +13,6 @@ import 
org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -51,15 +50,8 @@ public class Forecast implements ToXContentObject, Writeable { parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID); parser.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID); - parser.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + parser.declareField(ConstructingObjectParser.constructorArg(), + p -> TimeUtils.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), Result.TIMESTAMP, ValueType.VALUE); parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); parser.declareInt(ConstructingObjectParser.constructorArg(), DETECTOR_INDEX); parser.declareString((modelForecast, s) -> {}, Result.RESULT_TYPE); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java index 8ee49cb88d0..d226058bf1d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -51,15 +50,8 @@ public class Influencer implements ToXContentObject, Writeable { LENIENT_PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); LENIENT_PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME); LENIENT_PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE); - LENIENT_PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + LENIENT_PARSER.declareField(ConstructingObjectParser.constructorArg(), + p -> TimeUtils.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), Result.TIMESTAMP, ValueType.VALUE); LENIENT_PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); 
LENIENT_PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE); LENIENT_PARSER.declareDouble(Influencer::setProbability, PROBABILITY); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java index 9f066b6e98e..c17ed54c788 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -54,15 +53,8 @@ public class ModelPlot implements ToXContentObject, Writeable { a -> new ModelPlot((String) a[0], (Date) a[1], (long) a[2], (int) a[3])); parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - parser.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + parser.declareField(ConstructingObjectParser.constructorArg(), + p -> TimeUtils.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), Result.TIMESTAMP, ValueType.VALUE); parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); parser.declareInt(ConstructingObjectParser.constructorArg(), DETECTOR_INDEX); parser.declareString((modelPlot, s) -> {}, Result.RESULT_TYPE); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java index 63c4278e541..8637eb11722 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java @@ -57,6 +57,7 @@ public final class ReservedFieldNames { AnomalyCause.FIELD_NAME.getPreferredName(), AnomalyRecord.PROBABILITY.getPreferredName(), + AnomalyRecord.MULTI_BUCKET_IMPACT.getPreferredName(), AnomalyRecord.BY_FIELD_NAME.getPreferredName(), AnomalyRecord.BY_FIELD_VALUE.getPreferredName(), AnomalyRecord.CORRELATED_BY_FIELD_VALUE.getPreferredName(), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditMessage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditMessage.java index 850d89d0a72..1763006afbe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditMessage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditMessage.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -41,14 +40,8 @@ public class AuditMessage implements ToXContentObject, Writeable { } throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); }, LEVEL, ValueType.STRING); - PARSER.declareField(AuditMessage::setTimestamp, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + TIMESTAMP.getPreferredName() + "]"); - }, TIMESTAMP, ValueType.VALUE); + PARSER.declareField(AuditMessage::setTimestamp, + p -> TimeUtils.parseTimeField(p, TIMESTAMP.getPreferredName()), TIMESTAMP, ValueType.VALUE); PARSER.declareString(AuditMessage::setNodeName, NODE_NAME); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java index 6b334972366..019668f1a3c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java @@ -7,8 +7,11 @@ package org.elasticsearch.xpack.core.ml.utils.time; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.DateFieldMapper; +import java.io.IOException; +import java.util.Date; import java.util.concurrent.TimeUnit; public final class TimeUtils { @@ -16,6 +19,16 @@ public final class TimeUtils { // Do nothing } + public static Date parseTimeField(XContentParser parser, String fieldName) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return new Date(parser.longValue()); + } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + return new Date(TimeUtils.dateStringToEpoch(parser.text())); + } + throw new IllegalArgumentException( + "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]"); + } +
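// ---------------------------------------------------------------------------------------------
// [Editorial aside, not part of the patch] TimeUtils.parseTimeField above centralizes the
// lenient timestamp handling that each result parser previously duplicated: a VALUE_NUMBER
// token is read as epoch milliseconds, a VALUE_STRING token goes through dateStringToEpoch.
// A plain-JDK sketch of the same two-format contract; Instant.parse stands in for
// dateStringToEpoch here, and the class and method names are invented for the example:
import java.time.Instant;
import java.util.Date;

final class TimeFieldSketch {
    static Date parse(Object token) {
        if (token instanceof Number) {
            return new Date(((Number) token).longValue());                  // numeric: epoch millis
        }
        if (token instanceof String) {
            return new Date(Instant.parse((String) token).toEpochMilli()); // string: ISO-8601 date
        }
        throw new IllegalArgumentException("unexpected token [" + token + "]");
    }

    public static void main(String[] args) {
        // both representations resolve to the same instant
        System.out.println(parse(1541203200000L).equals(parse("2018-11-03T00:00:00Z"))); // true
    }
}
// ---------------------------------------------------------------------------------------------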
/** * First tries to parse the date as a Long and convert that to an * epoch time. If the long number has more than 10 digits it is considered a diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/AuthenticationResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/AuthenticationResult.java index 0f073ef4ae3..355a96dd19c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/AuthenticationResult.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/AuthenticationResult.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.core.security.authc; import org.elasticsearch.common.Nullable; import org.elasticsearch.xpack.core.security.user.User; +import java.util.Collections; +import java.util.Map; import java.util.Objects; /** @@ -21,7 +23,9 @@ import java.util.Objects; * */ public final class AuthenticationResult { - private static final AuthenticationResult NOT_HANDLED = new AuthenticationResult(Status.CONTINUE, null, null, null); + private static final AuthenticationResult NOT_HANDLED = new AuthenticationResult(Status.CONTINUE, null, null, null, null); + + public static final String THREAD_CONTEXT_KEY = "_xpack_security_auth_result"; public enum Status { SUCCESS, @@ -33,12 +37,15 @@ public final class AuthenticationResult { private final User user; private final String message; private final Exception exception; + private final Map<String, Object> metadata; - private AuthenticationResult(Status status, @Nullable User user, @Nullable String message, @Nullable Exception exception) { + private AuthenticationResult(Status status, @Nullable User user, @Nullable String message, @Nullable Exception exception, + @Nullable Map<String, Object> metadata) { this.status = status; this.user = user; this.message = message; this.exception = exception; + this.metadata = metadata == null ? Collections.emptyMap() : Collections.unmodifiableMap(metadata); } public Status getStatus() { @@ -57,6 +64,10 @@ public final class AuthenticationResult { return exception; } + public Map<String, Object> getMetadata() { + return metadata; + } + /** * Creates an {@code AuthenticationResult} that indicates that the supplied {@link User} * has been successfully authenticated. @@ -69,7 +80,16 @@ public final class AuthenticationResult { */ public static AuthenticationResult success(User user) { Objects.requireNonNull(user); - return new AuthenticationResult(Status.SUCCESS, user, null, null); + return success(user, null); + } + + /** + * Creates a successful result, with optional metadata + * + * @see #success(User) + */ + public static AuthenticationResult success(User user, @Nullable Map<String, Object> metadata) { + return new AuthenticationResult(Status.SUCCESS, user, null, null, metadata); } /** @@ -96,7 +116,7 @@ public final class AuthenticationResult { */ public static AuthenticationResult unsuccessful(String message, @Nullable Exception cause) { Objects.requireNonNull(message); - return new AuthenticationResult(Status.CONTINUE, null, message, cause); + return new AuthenticationResult(Status.CONTINUE, null, message, cause, null); }
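// ---------------------------------------------------------------------------------------------
// [Editorial aside, not part of the patch] A sketch of how a realm might use the new
// metadata-carrying overload above. The helper name and the "ldap_dn" key are invented;
// only success(User, Map) and getMetadata() come from AuthenticationResult itself.
static AuthenticationResult successWithDn(User user, String dn) {
    // getMetadata() on the returned result is immutable and never null: the constructor
    // turns a null map into Collections.emptyMap(), so success(user) alone yields an empty map
    return AuthenticationResult.success(user, Collections.singletonMap("ldap_dn", dn));
}
// ---------------------------------------------------------------------------------------------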

/** @@ -110,7 +130,7 @@ public final class AuthenticationResult { * */ public static AuthenticationResult terminate(String message, @Nullable Exception cause) { - return new AuthenticationResult(Status.TERMINATE, null, message, cause); + return new AuthenticationResult(Status.TERMINATE, null, message, cause, null); } public boolean isAuthenticated() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/NativeRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/NativeRealmSettings.java index e41b1409981..eebcb6db7af 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/NativeRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/NativeRealmSettings.java @@ -19,6 +19,6 @@ public final class NativeRealmSettings { * @return The {@link Setting setting configuration} for this realm type */ public static Set<Setting<?>> getSettings() { - return CachingUsernamePasswordRealmSettings.getCachingSettings(); + return CachingUsernamePasswordRealmSettings.getSettings(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/file/FileRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/file/FileRealmSettings.java index 110b8af9d7b..ed81d07d4cc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/file/FileRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/file/FileRealmSettings.java @@ -19,6 +19,6 @@ public final class FileRealmSettings { * @return The {@link Setting setting configuration} for this realm type */ public static Set<Setting<?>> getSettings() { - return CachingUsernamePasswordRealmSettings.getCachingSettings(); + return CachingUsernamePasswordRealmSettings.getSettings(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/LdapRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/LdapRealmSettings.java index 3f79c722be3..272b4115b28 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/LdapRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/LdapRealmSettings.java @@ -29,7 +29,7 @@ public final class LdapRealmSettings { */ public static Set<Setting<?>> getSettings(String type) { Set<Setting<?>> settings = new HashSet<>(); - settings.addAll(CachingUsernamePasswordRealmSettings.getCachingSettings()); + settings.addAll(CachingUsernamePasswordRealmSettings.getSettings()); settings.addAll(CompositeRoleMapperSettings.getSettings()); settings.add(LdapRealmSettings.EXECUTION_TIMEOUT); if (AD_TYPE.equals(type)) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CachingUsernamePasswordRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CachingUsernamePasswordRealmSettings.java index 6d060b0febb..6b7867e4211 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CachingUsernamePasswordRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CachingUsernamePasswordRealmSettings.java @@ -21,12 +21,15 @@ public final class CachingUsernamePasswordRealmSettings { public static final Setting<Integer> CACHE_MAX_USERS_SETTING = Setting.intSetting("cache.max_users", DEFAULT_MAX_USERS, Setting.Property.NodeScope);
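// ---------------------------------------------------------------------------------------------
// [Editorial aside, not part of the patch] The new "authentication.enabled" flag defined just
// below is realm-scoped like the cache settings here; in elasticsearch.yml it would sit under
// the realm's prefix (the realm name is invented):
//
//     xpack.security.authc.realms.ldap1.authentication.enabled: false
//
// and it reads back through the Setting definition against the realm's own settings namespace:
Settings realmSettings = Settings.builder().put("authentication.enabled", false).build();
boolean authcEnabled = CachingUsernamePasswordRealmSettings.AUTHC_ENABLED_SETTING.get(realmSettings); // false
// ---------------------------------------------------------------------------------------------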
+ public static final Setting<Boolean> AUTHC_ENABLED_SETTING = Setting.boolSetting("authentication.enabled", true, + Setting.Property.NodeScope); + private CachingUsernamePasswordRealmSettings() {} /** * Returns the {@link Setting setting configuration} that is common for all caching realms */ - public static Set<Setting<?>> getCachingSettings() { - return new HashSet<>(Arrays.asList(CACHE_HASH_ALGO_SETTING, CACHE_TTL_SETTING, CACHE_MAX_USERS_SETTING)); + public static Set<Setting<?>> getSettings() { + return new HashSet<>(Arrays.asList(CACHE_HASH_ALGO_SETTING, CACHE_TTL_SETTING, CACHE_MAX_USERS_SETTING, AUTHC_ENABLED_SETTING)); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index bc8c408731b..6281fbb2c8f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; +import org.elasticsearch.action.admin.indices.close.CloseIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsAction; @@ -22,6 +23,7 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; +import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; import org.elasticsearch.xpack.core.security.support.Automatons; import java.util.Arrays; @@ -56,7 +58,8 @@ public final class IndexPrivilege extends Privilege { private static final Automaton VIEW_METADATA_AUTOMATON = patterns(GetAliasesAction.NAME, AliasesExistAction.NAME, GetIndexAction.NAME, IndicesExistsAction.NAME, GetFieldMappingsAction.NAME + "*", GetMappingsAction.NAME, ClusterSearchShardsAction.NAME, TypesExistsAction.NAME, ValidateQueryAction.NAME + "*", GetSettingsAction.NAME); - private static final Automaton CREATE_FOLLOW_INDEX_AUTOMATON = patterns(PutFollowAction.NAME); + private static final Automaton MANAGE_FOLLOW_INDEX_AUTOMATON = patterns(PutFollowAction.NAME, UnfollowAction.NAME, + CloseIndexAction.NAME); public static final IndexPrivilege NONE = new IndexPrivilege("none", Automatons.EMPTY); public static final IndexPrivilege ALL = new IndexPrivilege("all", ALL_AUTOMATON); @@ -71,7 +74,7 @@ public final class IndexPrivilege extends Privilege { public static final IndexPrivilege DELETE_INDEX = new IndexPrivilege("delete_index", DELETE_INDEX_AUTOMATON); public static final IndexPrivilege CREATE_INDEX = new IndexPrivilege("create_index", CREATE_INDEX_AUTOMATON); public static final IndexPrivilege VIEW_METADATA = new IndexPrivilege("view_index_metadata", VIEW_METADATA_AUTOMATON); - public static final IndexPrivilege CREATE_FOLLOW_INDEX = new IndexPrivilege("create_follow_index", CREATE_FOLLOW_INDEX_AUTOMATON); + public static final IndexPrivilege
MANAGE_FOLLOW_INDEX = new IndexPrivilege("manage_follow_index", MANAGE_FOLLOW_INDEX_AUTOMATON); private static final Map<String, IndexPrivilege> VALUES = MapBuilder.<String, IndexPrivilege>newMapBuilder() .put("none", NONE) @@ -87,7 +90,7 @@ public final class IndexPrivilege extends Privilege { .put("delete_index", DELETE_INDEX) .put("view_index_metadata", VIEW_METADATA) .put("read_cross_cluster", READ_CROSS_CLUSTER) - .put("create_follow_index", CREATE_FOLLOW_INDEX) + .put("manage_follow_index", MANAGE_FOLLOW_INDEX) .immutableMap(); public static final Predicate<String> ACTION_MATCHER = ALL.predicate(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java index f1527429b32..d5a5d04dded 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java @@ -23,7 +23,8 @@ public final class SystemPrivilege extends Privilege { "indices:admin/mapping/put", // needed for recovery and shrink api "indices:admin/template/put", // needed for the TemplateUpgradeService "indices:admin/template/delete", // needed for the TemplateUpgradeService - "indices:admin/seq_no/global_checkpoint_sync*" // needed for global checkpoint syncs + "indices:admin/seq_no/global_checkpoint_sync*", // needed for global checkpoint syncs + "indices:admin/settings/update" // needed for DiskThresholdMonitor.markIndicesReadOnly ), Automatons.patterns("internal:transport/proxy/*"))); // no proxy actions for system user! private SystemPrivilege() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index b11867f8365..87a0099580b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -9,13 +9,18 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.RegExp; +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.set.Sets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.concurrent.ExecutionException; import java.util.function.Predicate; import static org.apache.lucene.util.automaton.MinimizationOperations.minimize; @@ -27,14 +32,23 @@ import static org.elasticsearch.common.Strings.collectionToDelimitedString; public final class Automatons { - public static final Setting<Integer> MAX_DETERMINIZED_STATES_SETTING = + static final Setting<Integer> MAX_DETERMINIZED_STATES_SETTING = Setting.intSetting("xpack.security.automata.max_determinized_states", 100000, DEFAULT_MAX_DETERMINIZED_STATES, Setting.Property.NodeScope); + + static final Setting<Boolean> CACHE_ENABLED = + Setting.boolSetting("xpack.security.automata.cache.enabled", true, Setting.Property.NodeScope);
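// ---------------------------------------------------------------------------------------------
// [Editorial aside, not part of the patch] Unlike the realm-scoped cache settings earlier in
// this patch, these automata cache settings are node-scoped, so they are tuned from the node
// configuration; a sketch of wiring them (the values are invented):
Settings nodeSettings = Settings.builder()
        .put("xpack.security.automata.cache.enabled", true)
        .put("xpack.security.automata.cache.size", 5_000)
        .put("xpack.security.automata.cache.ttl", "12h")
        .build();
Automatons.updateConfiguration(nodeSettings); // rebuilds the cache with the new limits (method appears below)
// ---------------------------------------------------------------------------------------------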
+ static final Setting<Integer> CACHE_SIZE = + Setting.intSetting("xpack.security.automata.cache.size", 10_000, Setting.Property.NodeScope); + static final Setting<TimeValue> CACHE_TTL = + Setting.timeSetting("xpack.security.automata.cache.ttl", TimeValue.timeValueHours(48), Setting.Property.NodeScope); + public static final Automaton EMPTY = Automata.makeEmpty(); public static final Automaton MATCH_ALL = Automata.makeAnyString(); - // this value is not final since we allow it to be set at runtime + // these values are not final since we allow them to be set at runtime private static int maxDeterminizedStates = 100000; + private static Cache<Object, Automaton> cache = buildCache(Settings.EMPTY); static final char WILDCARD_STRING = '*'; // String equality with support for wildcards static final char WILDCARD_CHAR = '?'; // Char equality with support for wildcards @@ -57,6 +71,18 @@ public final class Automatons { if (patterns.isEmpty()) { return EMPTY; } + if (cache == null) { + return buildAutomaton(patterns); + } else { + try { + return cache.computeIfAbsent(Sets.newHashSet(patterns), ignore -> buildAutomaton(patterns)); + } catch (ExecutionException e) { + throw unwrapCacheException(e); + } + } + } + + private static Automaton buildAutomaton(Collection<String> patterns) { List<Automaton> automata = new ArrayList<>(patterns.size()); for (String pattern : patterns) { final Automaton patternAutomaton = pattern(pattern); @@ -69,11 +95,23 @@ * Builds and returns an automaton that represents the given pattern. */ static Automaton pattern(String pattern) { + if (cache == null) { + return buildAutomaton(pattern); + } else { + try { + return cache.computeIfAbsent(pattern, ignore -> buildAutomaton(pattern)); + } catch (ExecutionException e) { + throw unwrapCacheException(e); + } + } + } + + private static Automaton buildAutomaton(String pattern) { if (pattern.startsWith("/")) { // it's a lucene regexp if (pattern.length() == 1 || !pattern.endsWith("/")) { throw new IllegalArgumentException("invalid pattern [" + pattern + "]. patterns starting with '/' " + - "indicate regular expression pattern and therefore must also end with '/'." + - " other patterns (those that do not start with '/') will be treated as simple wildcard patterns"); + "indicate regular expression pattern and therefore must also end with '/'." + + " other patterns (those that do not start with '/') will be treated as simple wildcard patterns"); } String regex = pattern.substring(1, pattern.length() - 1); return new RegExp(regex).toAutomaton(); @@ -84,16 +122,25 @@ } } + private static RuntimeException unwrapCacheException(ExecutionException e) { + final Throwable cause = e.getCause(); + if (cause instanceof RuntimeException) { + return (RuntimeException) cause; + } else { + return new RuntimeException(cause); + } + } +
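// ---------------------------------------------------------------------------------------------
// [Editorial aside, not part of the patch] The observable effect of the caching added above:
// with the cache enabled (its default), recompiling the same pattern, or the same set of
// patterns in any order, returns the cached Automaton instance, because patterns(...) keys the
// cache by Sets.newHashSet(patterns). pattern(String) is package-private, so this sketch
// assumes same-package access, as the new AutomatonsTests further down in this patch do.
Automaton a = Automatons.pattern("index-*");
assert a == Automatons.pattern("index-*");                             // cache hit: same instance
Automaton b = Automatons.patterns(Arrays.asList("index-*", "other-?"));
assert b == Automatons.patterns(Arrays.asList("other-?", "index-*"));  // HashSet key ignores order
// ---------------------------------------------------------------------------------------------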
/** * Builds and returns an automaton that represents the given pattern. */ @SuppressWarnings("fallthrough") // explicit fallthrough at end of switch static Automaton wildcard(String text) { List<Automaton> automata = new ArrayList<>(); - for (int i = 0; i < text.length();) { + for (int i = 0; i < text.length(); ) { final char c = text.charAt(i); int length = 1; - switch(c) { + switch (c) { case WILDCARD_STRING: automata.add(Automata.makeAnyString()); break; @@ -138,8 +185,19 @@ return predicate(automaton, "Predicate for " + automaton); } - public static void updateMaxDeterminizedStates(Settings settings) { + public static void updateConfiguration(Settings settings) { maxDeterminizedStates = MAX_DETERMINIZED_STATES_SETTING.get(settings); + cache = buildCache(settings); + } + + private static Cache<Object, Automaton> buildCache(Settings settings) { + if (CACHE_ENABLED.get(settings) == false) { + return null; + } + return CacheBuilder.<Object, Automaton>builder() + .setExpireAfterAccess(CACHE_TTL.get(settings)) + .setMaximumWeight(CACHE_SIZE.get(settings)) + .build(); } // accessor for testing @@ -161,4 +219,11 @@ } }; } + + public static void addSettings(List<Setting<?>> settingsList) { + settingsList.add(MAX_DETERMINIZED_STATES_SETTING); + settingsList.add(CACHE_ENABLED); + settingsList.add(CACHE_SIZE); + settingsList.add(CACHE_TTL); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java index 6503f686b64..11843a40020 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java @@ -197,6 +197,7 @@ public class CertParsingUtils { static KeyConfig createKeyConfig(X509KeyPairSettings keyPair, Settings settings, String trustStoreAlgorithm) { String keyPath = keyPair.keyPath.get(settings).orElse(null); String keyStorePath = keyPair.keystorePath.get(settings).orElse(null); + String keyStoreType = getKeyStoreType(keyPair.keystoreType, settings, keyStorePath); if (keyPath != null && keyStorePath != null) { throw new IllegalArgumentException("you cannot specify a keystore and key file"); @@ -212,10 +213,9 @@ return new PEMKeyConfig(keyPath, keyPassword, certPath); } - if (keyStorePath != null) { + if (keyStorePath != null || keyStoreType.equalsIgnoreCase("pkcs11")) { SecureString keyStorePassword = keyPair.keystorePassword.get(settings); String keyStoreAlgorithm = keyPair.keystoreAlgorithm.get(settings); - String keyStoreType = getKeyStoreType(keyPair.keystoreType, settings, keyStorePath); SecureString keyStoreKeyPassword = keyPair.keystoreKeyPassword.get(settings); if (keyStoreKeyPassword.length() == 0) { keyStoreKeyPassword = keyStorePassword; @@ -224,7 +224,6 @@ trustStoreAlgorithm); } return null; - } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DefaultJDKTrustConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DefaultJDKTrustConfig.java index 0a4c0552f69..4b5055a9e86 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DefaultJDKTrustConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DefaultJDKTrustConfig.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ssl; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; +import
org.elasticsearch.common.settings.SecureString; import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo; @@ -30,9 +31,14 @@ import java.util.List; */ class DefaultJDKTrustConfig extends TrustConfig { - static final DefaultJDKTrustConfig INSTANCE = new DefaultJDKTrustConfig(); + private SecureString trustStorePassword; - private DefaultJDKTrustConfig() { + /** + * @param trustStorePassword the password for the default jdk truststore defined either as a system property or in the Elasticsearch + * configuration. It applies only when PKCS#11 tokens are used, and is null otherwise + */ + DefaultJDKTrustConfig(@Nullable SecureString trustStorePassword) { + this.trustStorePassword = trustStorePassword; } @Override @@ -76,13 +82,14 @@ class DefaultJDKTrustConfig extends TrustConfig { /** * Merges the default trust configuration with the provided {@link TrustConfig} * @param trustConfig the trust configuration to merge with + * @param trustStorePassword the password for the default jdk truststore. It applies only to PKCS#11 tokens * @return a {@link TrustConfig} that represents a combination of both trust configurations */ - static TrustConfig merge(TrustConfig trustConfig) { + static TrustConfig merge(TrustConfig trustConfig, SecureString trustStorePassword) { if (trustConfig == null) { - return INSTANCE; + return new DefaultJDKTrustConfig(trustStorePassword); } else { - return new CombiningTrustConfig(Arrays.asList(INSTANCE, trustConfig)); + return new CombiningTrustConfig(Arrays.asList(new DefaultJDKTrustConfig(trustStorePassword), trustConfig)); } } @@ -94,9 +101,10 @@ class DefaultJDKTrustConfig extends TrustConfig { * @return the KeyStore used as truststore for PKCS#11 initialized with the password, null otherwise */ private KeyStore getSystemTrustStore() throws KeyStoreException, CertificateException, NoSuchAlgorithmException, IOException { - if (System.getProperty("javax.net.ssl.trustStoreType", "").equalsIgnoreCase("PKCS11")) { + if (System.getProperty("javax.net.ssl.trustStoreType", "").equalsIgnoreCase("PKCS11") + && trustStorePassword != null) { KeyStore keyStore = KeyStore.getInstance("PKCS11"); - keyStore.load(null, System.getProperty("javax.net.ssl.trustStorePassword", "").toCharArray()); + keyStore.load(null, trustStorePassword.getChars()); return keyStore; } return null; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java index 9054d664eec..0862cb929ef 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java @@ -212,12 +212,11 @@ public final class SSLConfiguration { private static TrustConfig createCertChainTrustConfig(Settings settings, KeyConfig keyConfig, SSLConfiguration global) { String trustStorePath = SETTINGS_PARSER.truststorePath.get(settings).orElse(null); - + String trustStoreType = getKeyStoreType(SETTINGS_PARSER.truststoreType, settings, trustStorePath); List<String> caPaths = getListOrNull(SETTINGS_PARSER.caPaths, settings); if (trustStorePath != null && caPaths != null) { throw new IllegalArgumentException("you cannot specify a truststore and ca files"); } - VerificationMode verificationMode = SETTINGS_PARSER.verificationMode.get(settings).orElseGet(() -> { if (global != null) { return global.verificationMode(); @@ -228,26 +227,41 @@ public final class
SSLConfiguration { return TrustAllConfig.INSTANCE; } else if (caPaths != null) { return new PEMTrustConfig(caPaths); - } else if (trustStorePath != null) { - SecureString trustStorePassword = SETTINGS_PARSER.truststorePassword.get(settings); + } else if (trustStorePath != null || trustStoreType.equalsIgnoreCase("pkcs11")) { String trustStoreAlgorithm = SETTINGS_PARSER.truststoreAlgorithm.get(settings); - String trustStoreType = getKeyStoreType(SETTINGS_PARSER.truststoreType, settings, trustStorePath); + SecureString trustStorePassword = SETTINGS_PARSER.truststorePassword.get(settings); return new StoreTrustConfig(trustStorePath, trustStoreType, trustStorePassword, trustStoreAlgorithm); } else if (global == null && System.getProperty("javax.net.ssl.trustStore") != null && System.getProperty("javax.net.ssl.trustStore").equals("NONE") == false) { try (SecureString truststorePassword = new SecureString(System.getProperty("javax.net.ssl.trustStorePassword", ""))) { return new StoreTrustConfig(System.getProperty("javax.net.ssl.trustStore"), KeyStore.getDefaultType(), truststorePassword, - System.getProperty("ssl.TrustManagerFactory.algorithm", TrustManagerFactory.getDefaultAlgorithm())); + System.getProperty("ssl.TrustManagerFactory.algorithm", TrustManagerFactory.getDefaultAlgorithm())); } } else if (global != null && keyConfig == global.keyConfig()) { return global.trustConfig(); } else if (keyConfig != KeyConfig.NONE) { - return DefaultJDKTrustConfig.merge(keyConfig); + return DefaultJDKTrustConfig.merge(keyConfig, getDefaultTrustStorePassword(settings)); } else { - return DefaultJDKTrustConfig.INSTANCE; + return new DefaultJDKTrustConfig(getDefaultTrustStorePassword(settings)); } } + private static SecureString getDefaultTrustStorePassword(Settings settings) { + // We only handle the default store password if it's a PKCS#11 token + if (System.getProperty("javax.net.ssl.trustStoreType", "").equalsIgnoreCase("PKCS11")) { + try (SecureString systemTrustStorePassword = + new SecureString(System.getProperty("javax.net.ssl.trustStorePassword", "").toCharArray())) { + if (systemTrustStorePassword.length() == 0) { + try (SecureString trustStorePassword = SETTINGS_PARSER.truststorePassword.get(settings)) { + return trustStorePassword; + } + } + return systemTrustStorePassword; + } + } + return null; + } + private static List<String> getListOrNull(Setting<List<String>> listSetting, Settings settings) { return getListOrDefault(listSetting, settings, null); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfig.java index a47745c1334..3337465994c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfig.java @@ -15,8 +15,6 @@ import javax.net.ssl.X509ExtendedKeyManager; import javax.net.ssl.X509ExtendedTrustManager; import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; import java.nio.file.Path; import java.security.GeneralSecurityException; import java.security.Key; @@ -49,7 +47,7 @@ class StoreKeyConfig extends KeyConfig { /** * Creates a new configuration that can be used to load key and trust material from a {@link KeyStore} - * @param keyStorePath the path to the keystore file + * @param keyStorePath the path to the keystore file or null when keyStoreType is pkcs11 * @param keyStoreType the type of the keystore file * @param
keyStorePassword the password for the keystore * @param keyPassword the password for the private key in the keystore @@ -58,7 +56,7 @@ class StoreKeyConfig extends KeyConfig { */ StoreKeyConfig(String keyStorePath, String keyStoreType, SecureString keyStorePassword, SecureString keyPassword, String keyStoreAlgorithm, String trustStoreAlgorithm) { - this.keyStorePath = Objects.requireNonNull(keyStorePath, "keystore path must be specified"); + this.keyStorePath = keyStorePath; this.keyStoreType = Objects.requireNonNull(keyStoreType, "keystore type must be specified"); // since we support reloading the keystore, we must store the passphrase in memory for the life of the node, so we // clone the password and never close it during our uses below @@ -71,7 +69,7 @@ class StoreKeyConfig extends KeyConfig { @Override X509ExtendedKeyManager createKeyManager(@Nullable Environment environment) { try { - KeyStore ks = getKeyStore(environment); + KeyStore ks = getStore(environment, keyStorePath, keyStoreType, keyStorePassword); checkKeyStore(ks); return CertParsingUtils.keyManager(ks, keyPassword.getChars(), keyStoreAlgorithm); } catch (IOException | CertificateException | NoSuchAlgorithmException | UnrecoverableKeyException | KeyStoreException e) { @@ -82,16 +80,16 @@ class StoreKeyConfig extends KeyConfig { @Override X509ExtendedTrustManager createTrustManager(@Nullable Environment environment) { try { - return CertParsingUtils.trustManager(keyStorePath, keyStoreType, keyStorePassword.getChars(), trustStoreAlgorithm, environment); - } catch (Exception e) { + KeyStore ks = getStore(environment, keyStorePath, keyStoreType, keyStorePassword); + return CertParsingUtils.trustManager(ks, trustStoreAlgorithm); + } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException e) { throw new ElasticsearchException("failed to initialize a TrustManagerFactory", e); } } @Override Collection certificates(Environment environment) throws GeneralSecurityException, IOException { - final Path path = CertParsingUtils.resolvePath(keyStorePath, environment); - final KeyStore trustStore = CertParsingUtils.readKeyStore(path, keyStoreType, keyStorePassword.getChars()); + final KeyStore trustStore = getStore(environment, keyStorePath, keyStoreType, keyStorePassword); final List certificates = new ArrayList<>(); final Enumeration aliases = trustStore.aliases(); while (aliases.hasMoreElements()) { @@ -112,13 +110,16 @@ class StoreKeyConfig extends KeyConfig { @Override List filesToMonitor(@Nullable Environment environment) { + if (keyStorePath == null) { + return Collections.emptyList(); + } return Collections.singletonList(CertParsingUtils.resolvePath(keyStorePath, environment)); } @Override List privateKeys(@Nullable Environment environment) { try { - KeyStore keyStore = getKeyStore(environment); + KeyStore keyStore = getStore(environment, keyStorePath, keyStoreType, keyStorePassword); List privateKeys = new ArrayList<>(); for (Enumeration e = keyStore.aliases(); e.hasMoreElements(); ) { final String alias = e.nextElement(); @@ -135,15 +136,6 @@ class StoreKeyConfig extends KeyConfig { } } - private KeyStore getKeyStore(@Nullable Environment environment) - throws KeyStoreException, CertificateException, NoSuchAlgorithmException, IOException { - try (InputStream in = Files.newInputStream(CertParsingUtils.resolvePath(keyStorePath, environment))) { - KeyStore ks = KeyStore.getInstance(keyStoreType); - ks.load(in, keyStorePassword.getChars()); - return ks; - } - } - private void checkKeyStore(KeyStore 
keyStore) throws KeyStoreException { Enumeration aliases = keyStore.aliases(); while (aliases.hasMoreElements()) { @@ -152,9 +144,11 @@ class StoreKeyConfig extends KeyConfig { return; } } - throw new IllegalArgumentException("the keystore [" + keyStorePath + "] does not contain a private key entry"); + final String message = null != keyStorePath ? + "the keystore [" + keyStorePath + "] does not contain a private key entry" : + "the configured PKCS#11 token does not contain a private key entry"; + throw new IllegalArgumentException(message); } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/StoreTrustConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/StoreTrustConfig.java index 7398d32a61c..d4848f98339 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/StoreTrustConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/StoreTrustConfig.java @@ -55,8 +55,8 @@ class StoreTrustConfig extends TrustConfig { @Override X509ExtendedTrustManager createTrustManager(@Nullable Environment environment) { try { - return CertParsingUtils.trustManager(trustStorePath, trustStoreType, trustStorePassword.getChars(), - trustStoreAlgorithm, environment); + KeyStore trustStore = getStore(environment, trustStorePath, trustStoreType, trustStorePassword); + return CertParsingUtils.trustManager(trustStore, trustStoreAlgorithm); } catch (Exception e) { throw new ElasticsearchException("failed to initialize a TrustManagerFactory", e); } @@ -64,8 +64,7 @@ class StoreTrustConfig extends TrustConfig { @Override Collection certificates(Environment environment) throws GeneralSecurityException, IOException { - final Path path = CertParsingUtils.resolvePath(trustStorePath, environment); - final KeyStore trustStore = CertParsingUtils.readKeyStore(path, trustStoreType, trustStorePassword.getChars()); + final KeyStore trustStore = getStore(environment, trustStorePath, trustStoreType, trustStorePassword); final List certificates = new ArrayList<>(); final Enumeration aliases = trustStore.aliases(); while (aliases.hasMoreElements()) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TrustConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TrustConfig.java index f7f6d28f176..a9bc737c943 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TrustConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TrustConfig.java @@ -7,14 +7,21 @@ package org.elasticsearch.xpack.core.ssl; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo; import javax.net.ssl.X509ExtendedTrustManager; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; import java.nio.file.Path; import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.Arrays; @@ -58,6 +65,38 @@ abstract class TrustConfig { */ public abstract int hashCode(); + /** + * Loads and returns the appropriate {@link KeyStore} for the 
given configuration. The KeyStore can be backed by a file + * in any format that the Security Provider might support, or a cryptographic software or hardware token in the case + * of a PKCS#11 Provider. + * + * @param environment the environment to resolve files against or null in the case of running in a transport client + * @param storePath the path to the {@link KeyStore} to load, or null if a PKCS11 token is configured as the keystore/truststore + * of the JVM + * @param storeType the type of the {@link KeyStore} + * @param storePassword the password to be used for decrypting the {@link KeyStore} + * @return the loaded KeyStore to be used as a keystore or a truststore + * @throws KeyStoreException if an instance of the specified type cannot be loaded + * @throws CertificateException if any of the certificates in the keystore could not be loaded + * @throws NoSuchAlgorithmException if the algorithm used to check the integrity of the keystore cannot be found + * @throws IOException if there is an I/O issue with the KeyStore data or the password is incorrect + */ + KeyStore getStore(@Nullable Environment environment, @Nullable String storePath, String storeType, SecureString storePassword) + throws KeyStoreException, CertificateException, NoSuchAlgorithmException, IOException { + if (null != storePath) { + try (InputStream in = Files.newInputStream(CertParsingUtils.resolvePath(storePath, environment))) { + KeyStore ks = KeyStore.getInstance(storeType); + ks.load(in, storePassword.getChars()); + return ks; + } + } else if (storeType.equalsIgnoreCase("pkcs11")) { + KeyStore ks = KeyStore.getInstance(storeType); + ks.load(null, storePassword.getChars()); + return ks; + } + throw new IllegalArgumentException("keystore.path or truststore.path can only be empty when using a PKCS#11 token"); + } + /** * A trust configuration that is a combination of a trust configuration with the default JDK trust configuration. 
This trust * configuration returns a trust manager verifies certificates against both the default JDK trusted configurations and the specific diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java index 2b28c2f15c9..b972781695a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java @@ -106,7 +106,7 @@ public abstract class WatchRecord implements ToXContentObject { } if (executionResult.conditionResult().met()) { final Collection values = executionResult.actionsResults().values(); - // acknowledged as state wins because the user had explicitely set this, where as throttled may happen due to execution + // acknowledged as state wins because the user had explicitly set this, where as throttled may happen due to execution if (values.stream().anyMatch((r) -> r.action().status() == Action.Result.Status.ACKNOWLEDGED)) { return ExecutionState.ACKNOWLEDGED; } else if (values.stream().anyMatch((r) -> r.action().status() == Action.Result.Status.THROTTLED)) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/input/ExecutableInput.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/input/ExecutableInput.java index 1c08af3cf90..63d68c62192 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/input/ExecutableInput.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/input/ExecutableInput.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.watcher.input; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -17,11 +16,9 @@ import java.io.IOException; public abstract class ExecutableInput implements ToXContentObject { protected final I input; - protected final Logger logger; - protected ExecutableInput(I input, Logger logger) { + protected ExecutableInput(I input) { this.input = input; - this.logger = logger; } /** diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 2620fee9fd1..921e726613f 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -797,7 +797,7 @@ } } }, - "maanagement": { + "management": { "properties": { "threads": { "type": "integer" diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/test/http/MockWebServer.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/test/http/MockWebServer.java index 2e2210fac9a..516d695db85 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/test/http/MockWebServer.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/test/http/MockWebServer.java @@ -11,12 +11,12 @@ import com.sun.net.httpserver.HttpsConfigurator; import com.sun.net.httpserver.HttpsParameters; import com.sun.net.httpserver.HttpsServer; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import 
org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.mocksocket.MockHttpServer; @@ -73,7 +73,7 @@ public class MockWebServer implements Closeable { */ public MockWebServer(SSLContext sslContext, boolean needClientAuth) { this.needClientAuth = needClientAuth; - this.logger = ESLoggerFactory.getLogger(this.getClass()); + this.logger = LogManager.getLogger(this.getClass()); this.sslContext = sslContext; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutJobActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutJobActionRequestTests.java index 039954f1414..46dd97a63a8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutJobActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutJobActionRequestTests.java @@ -46,7 +46,7 @@ public class PutJobActionRequestTests extends AbstractStreamableXContentTestCase public void testParseRequest_InvalidCreateSetting() throws IOException { Job.Builder jobConfiguration = buildJobBuilder(jobId, null); - jobConfiguration.setLastDataTime(new Date()); + jobConfiguration.setFinishedTime(new Date()); BytesReference bytes = XContentHelper.toXContent(jobConfiguration, XContentType.JSON, false); XContentParser parser = createParser(XContentType.JSON.xContent(), bytes); expectThrows(IllegalArgumentException.class, () -> Request.parseRequest(jobId, parser)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigActionRequestTests.java index 8fed16271e6..ac2d559c29a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigActionRequestTests.java @@ -38,7 +38,7 @@ public class ValidateJobConfigActionRequestTests extends AbstractStreamableTestC public void testParseRequest_InvalidCreateSetting() throws IOException { String jobId = randomValidJobId(); Job.Builder jobConfiguration = buildJobBuilder(jobId, null); - jobConfiguration.setLastDataTime(new Date()); + jobConfiguration.setFinishedTime(new Date()); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder xContentBuilder = jobConfiguration.toXContent(builder, ToXContent.EMPTY_PARAMS); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java index d008b31f9a6..d1493f2fe4d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java @@ -14,6 +14,7 @@ import java.nio.charset.Charset; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; +import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; import java.util.TreeMap; @@ -74,6 +75,14 @@ public class FileStructureTests extends AbstractSerializingTestCase ingestPipeline = new 
LinkedHashMap<>(); + for (String field : generateRandomStringArray(5, 20, false, false)) { + ingestPipeline.put(field, Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(10))); + } + builder.setMappings(ingestPipeline); + } + if (randomBoolean()) { Map fieldStats = new TreeMap<>(); for (String field : generateRandomStringArray(5, 20, false, false)) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java index 5e793e54da4..4fa6617f045 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java @@ -101,7 +101,6 @@ public class JobTests extends AbstractSerializingTestCase { assertNotNull(job.getDataDescription()); assertNull(job.getDescription()); assertNull(job.getFinishedTime()); - assertNull(job.getLastDataTime()); assertNull(job.getModelPlotConfig()); assertNull(job.getRenormalizationWindowDays()); assertNull(job.getBackgroundPersistInterval()); @@ -484,12 +483,10 @@ public class JobTests extends AbstractSerializingTestCase { builder.setCreateTime(new Date()); builder.setFinishedTime(new Date()); - builder.setLastDataTime(new Date()); Set expected = new HashSet<>(); expected.add(Job.CREATE_TIME.getPreferredName()); expected.add(Job.FINISHED_TIME.getPreferredName()); - expected.add(Job.LAST_DATA_TIME.getPreferredName()); expected.add(Job.MODEL_SNAPSHOT_ID.getPreferredName()); assertEquals(expected, new HashSet<>(builder.invalidCreateTimeSettings())); @@ -613,9 +610,6 @@ public class JobTests extends AbstractSerializingTestCase { if (randomBoolean()) { builder.setFinishedTime(new Date(randomNonNegativeLong())); } - if (randomBoolean()) { - builder.setLastDataTime(new Date(randomNonNegativeLong())); - } if (randomBoolean()) { builder.setEstablishedModelMemory(randomNonNegativeLong()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java index fc2ee52dc41..882a46f3cbe 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java @@ -37,6 +37,9 @@ public class AnomalyRecordTests extends AbstractSerializingTestCase patterns = Arrays.asList("*", "filebeat*de-tst-chatclassification*", - "metricbeat*de-tst-chatclassification*", - "packetbeat*de-tst-chatclassification*", - "heartbeat*de-tst-chatclassification*", - "filebeat*documentationdev*", - "metricbeat*documentationdev*", - "packetbeat*documentationdev*", - "heartbeat*documentationdev*", - "filebeat*devsupport-website*", - "metricbeat*devsupport-website*", - "packetbeat*devsupport-website*", - "heartbeat*devsupport-website*", - ".kibana-tcloud", - ".reporting-tcloud", - "filebeat-app-ingress-*", - "filebeat-app-tcloud-*", - "filebeat*documentationprod*", - "metricbeat*documentationprod*", - "packetbeat*documentationprod*", - "heartbeat*documentationprod*", - "filebeat*bender-minio-test-1*", - "metricbeat*bender-minio-test-1*", - "packetbeat*bender-minio-test-1*", - "heartbeat*bender-minio-test-1*"); + "metricbeat*de-tst-chatclassification*", + "packetbeat*de-tst-chatclassification*", + "heartbeat*de-tst-chatclassification*", + 
"filebeat*documentationdev*", + "metricbeat*documentationdev*", + "packetbeat*documentationdev*", + "heartbeat*documentationdev*", + "filebeat*devsupport-website*", + "metricbeat*devsupport-website*", + "packetbeat*devsupport-website*", + "heartbeat*devsupport-website*", + ".kibana-tcloud", + ".reporting-tcloud", + "filebeat-app-ingress-*", + "filebeat-app-tcloud-*", + "filebeat*documentationprod*", + "metricbeat*documentationprod*", + "packetbeat*documentationprod*", + "heartbeat*documentationprod*", + "filebeat*bender-minio-test-1*", + "metricbeat*bender-minio-test-1*", + "packetbeat*bender-minio-test-1*", + "heartbeat*bender-minio-test-1*"); final Automaton automaton = Automatons.patterns(patterns); assertTrue(Operations.isTotal(automaton)); assertTrue(automaton.isDeterministic()); @@ -137,7 +139,7 @@ public class AutomatonsTests extends ESTestCase { assertNotEquals(10000, Automatons.getMaxDeterminizedStates()); // set to the min value Settings settings = Settings.builder().put(Automatons.MAX_DETERMINIZED_STATES_SETTING.getKey(), 10000).build(); - Automatons.updateMaxDeterminizedStates(settings); + Automatons.updateConfiguration(settings); assertEquals(10000, Automatons.getMaxDeterminizedStates()); final List names = new ArrayList<>(1024); @@ -147,8 +149,63 @@ public class AutomatonsTests extends ESTestCase { TooComplexToDeterminizeException e = expectThrows(TooComplexToDeterminizeException.class, () -> Automatons.patterns(names)); assertThat(e.getMaxDeterminizedStates(), equalTo(10000)); } finally { - Automatons.updateMaxDeterminizedStates(Settings.EMPTY); + Automatons.updateConfiguration(Settings.EMPTY); assertEquals(100000, Automatons.getMaxDeterminizedStates()); } } + + public void testCachingOfAutomatons() { + Automatons.updateConfiguration(Settings.EMPTY); + + String pattern1 = randomAlphaOfLengthBetween(3, 8) + "*"; + String pattern2 = "/" + randomAlphaOfLengthBetween(1, 2) + "*" + randomAlphaOfLengthBetween(2, 4) + "/"; + + final Automaton a1 = Automatons.pattern(pattern1); + final Automaton a2 = Automatons.pattern(pattern2); + + assertThat(Automatons.pattern(pattern1), sameInstance(a1)); + assertThat(Automatons.pattern(pattern2), sameInstance(a2)); + + final Automaton a3 = Automatons.patterns(pattern1, pattern2); + final Automaton a4 = Automatons.patterns(pattern2, pattern1); + assertThat(a3, sameInstance(a4)); + } + + public void testConfigurationOfCacheSize() { + final Settings settings = Settings.builder() + .put(Automatons.CACHE_SIZE.getKey(), 2) + .build(); + Automatons.updateConfiguration(settings); + + String pattern1 = "a"; + String pattern2 = "b"; + String pattern3 = "c"; + + final Automaton a1 = Automatons.pattern(pattern1); + final Automaton a2 = Automatons.pattern(pattern2); + + assertThat(Automatons.pattern(pattern1), sameInstance(a1)); + assertThat(Automatons.pattern(pattern2), sameInstance(a2)); + + final Automaton a3 = Automatons.pattern(pattern3); + assertThat(Automatons.pattern(pattern3), sameInstance(a3)); + + // either pattern 1 or 2 should be evicted (in theory it should be 1, but we don't care about that level of precision) + final Automaton a1b = Automatons.pattern(pattern1); + final Automaton a2b = Automatons.pattern(pattern2); + if (a1b == a1 && a2b == a2) { + fail("Expected one of the existing automatons to be evicted, but both were still cached"); + } + } + + public void testDisableCache() { + final Settings settings = Settings.builder() + .put(Automatons.CACHE_ENABLED.getKey(), false) + .build(); + Automatons.updateConfiguration(settings); + + final 
String pattern = randomAlphaOfLengthBetween(5, 10); + final Automaton automaton = Automatons.pattern(pattern); + assertThat(Automatons.pattern(pattern), not(sameInstance(automaton))); + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationTests.java index bb6fd279eec..e0b70c09add 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationTests.java @@ -228,20 +228,6 @@ public class SSLConfigurationTests extends ESTestCase { assertThat(ksTrustInfo.trustStoreAlgorithm, is(equalTo("trusted"))); } - public void testThatEmptySettingsAreEqual() { - SSLConfiguration sslConfiguration = new SSLConfiguration(Settings.EMPTY); - SSLConfiguration sslConfiguration1 = new SSLConfiguration(Settings.EMPTY); - assertThat(sslConfiguration.equals(sslConfiguration1), is(equalTo(true))); - assertThat(sslConfiguration1.equals(sslConfiguration), is(equalTo(true))); - assertThat(sslConfiguration.equals(sslConfiguration), is(equalTo(true))); - assertThat(sslConfiguration1.equals(sslConfiguration1), is(equalTo(true))); - - SSLConfiguration profileSSLConfiguration = new SSLConfiguration(Settings.EMPTY, sslConfiguration); - assertThat(sslConfiguration.equals(profileSSLConfiguration), is(equalTo(true))); - assertThat(profileSSLConfiguration.equals(sslConfiguration), is(equalTo(true))); - assertThat(profileSSLConfiguration.equals(profileSSLConfiguration), is(equalTo(true))); - } - public void testThatSettingsWithDifferentKeystoresAreNotEqual() { SSLConfiguration sslConfiguration = new SSLConfiguration(Settings.builder() .put("keystore.path", "path") @@ -268,15 +254,6 @@ public class SSLConfigurationTests extends ESTestCase { assertThat(sslConfiguration1.equals(sslConfiguration1), is(equalTo(true))); } - public void testThatEmptySettingsHaveSameHashCode() { - SSLConfiguration sslConfiguration = new SSLConfiguration(Settings.EMPTY); - SSLConfiguration sslConfiguration1 = new SSLConfiguration(Settings.EMPTY); - assertThat(sslConfiguration.hashCode(), is(equalTo(sslConfiguration1.hashCode()))); - - SSLConfiguration profileSettings = new SSLConfiguration(Settings.EMPTY, sslConfiguration); - assertThat(profileSettings.hashCode(), is(equalTo(sslConfiguration.hashCode()))); - } - public void testThatSettingsWithDifferentKeystoresHaveDifferentHashCode() { SSLConfiguration sslConfiguration = new SSLConfiguration(Settings.builder() .put("keystore.path", "path") @@ -390,7 +367,8 @@ public class SSLConfigurationTests extends ESTestCase { private void assertCombiningTrustConfigContainsCorrectIssuers(SSLConfiguration sslConfiguration) { X509Certificate[] trustConfAcceptedIssuers = sslConfiguration.trustConfig().createTrustManager(null).getAcceptedIssuers(); X509Certificate[] keyConfAcceptedIssuers = sslConfiguration.keyConfig().createTrustManager(null).getAcceptedIssuers(); - X509Certificate[] defaultAcceptedIssuers = DefaultJDKTrustConfig.INSTANCE.createTrustManager(null).getAcceptedIssuers(); + X509Certificate[] defaultAcceptedIssuers = new DefaultJDKTrustConfig(null).createTrustManager(null) + .getAcceptedIssuers(); assertEquals(keyConfAcceptedIssuers.length + defaultAcceptedIssuers.length, trustConfAcceptedIssuers.length); assertThat(Arrays.asList(keyConfAcceptedIssuers), everyItem(isIn(trustConfAcceptedIssuers))); 
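The caching tests above assume that `Automatons` keeps a bounded cache of compiled pattern automatons keyed by the input patterns, which can be resized or disabled through settings. Below is a minimal sketch of that idea using an access-ordered `LinkedHashMap` as the LRU store, with `java.util.regex.Pattern` standing in for Lucene's `Automaton`; `PatternCache`, `enabled`, and `maxSize` are illustrative names, not the real implementation.

```java
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;

// Illustrative LRU cache for compiled patterns; hypothetical class, not the real Automatons code.
final class PatternCache {
    private final boolean enabled;
    private final Map<String, Pattern> cache;

    PatternCache(boolean enabled, int maxSize) {
        this.enabled = enabled;
        // An access-ordered LinkedHashMap evicts the least recently used entry once maxSize is exceeded.
        this.cache = new LinkedHashMap<String, Pattern>(16, 0.75f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<String, Pattern> eldest) {
                return size() > maxSize;
            }
        };
    }

    synchronized Pattern pattern(String wildcard) {
        if (enabled == false) {
            return compile(wildcard); // caching disabled: always build a fresh instance
        }
        return cache.computeIfAbsent(wildcard, PatternCache::compile);
    }

    private static Pattern compile(String wildcard) {
        // Translate a simple '*' wildcard into a regex, quoting everything else literally.
        return Pattern.compile(("\\Q" + wildcard + "\\E").replace("*", "\\E.*\\Q"));
    }
}
```

On this sketch, `pattern(p)` returns the same instance for repeated calls until `p` is evicted, which is the behavior `testCachingOfAutomatons` and `testConfigurationOfCacheSize` check for the real cache, and `testDisableCache` checks the fresh-instance path.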
assertThat(Arrays.asList(defaultAcceptedIssuers), everyItem(isIn(trustConfAcceptedIssuers))); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index e0fee670d8d..88d10071e85 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -477,8 +477,8 @@ public class SSLServiceTests extends ESTestCase { public void testEmptyTrustManager() throws Exception { Settings settings = Settings.builder().build(); final SSLService sslService = new SSLService(settings, env); - SSLConfiguration sslConfig = new SSLConfiguration(settings); - X509ExtendedTrustManager trustManager = sslService.sslContextHolder(sslConfig).getEmptyTrustManager(); + X509ExtendedTrustManager trustManager = sslService.sslContextHolder(sslService.getSSLConfiguration("xpack.ssl")) + .getEmptyTrustManager(); assertThat(trustManager.getAcceptedIssuers(), emptyArray()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java index 511fd778113..a7d6088bc7a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.ssl; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.TestEnvironment; @@ -16,6 +17,7 @@ import javax.net.ssl.X509ExtendedKeyManager; import java.security.PrivateKey; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -31,6 +33,23 @@ public class StoreKeyConfigTests extends ESTestCase { tryReadPrivateKeyFromKeyStore("PKCS12", ".p12"); } + public void testKeyStorePathCanBeEmptyForPkcs11() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); + final Settings settings = Settings.builder().put("path.home", createTempDir()).build(); + final SecureString keyStorePassword = new SecureString("password".toCharArray()); + final StoreKeyConfig keyConfig = new StoreKeyConfig(null, "PKCS12", keyStorePassword, keyStorePassword, + KeyManagerFactory.getDefaultAlgorithm(), TrustManagerFactory.getDefaultAlgorithm()); + Exception e = expectThrows(IllegalArgumentException.class, () -> + keyConfig.createKeyManager(TestEnvironment.newEnvironment(settings))); + assertThat(e.getMessage(), equalTo("keystore.path or truststore.path can only be empty when using a PKCS#11 token")); + final StoreKeyConfig keyConfigPkcs11 = new StoreKeyConfig(null, "PKCS11", keyStorePassword, keyStorePassword, + KeyManagerFactory.getDefaultAlgorithm(), TrustManagerFactory.getDefaultAlgorithm()); + ElasticsearchException ee = expectThrows(ElasticsearchException.class, () -> + keyConfigPkcs11.createKeyManager(TestEnvironment.newEnvironment(settings))); + assertThat(ee.getMessage(), containsString("failed to initialize a KeyManagerFactory")); + assertThat(ee.getCause().getMessage(), containsString("PKCS11 not found")); + } + private void tryReadPrivateKeyFromKeyStore(String type, String extension) { final Settings 
settings = Settings.builder().put("path.home", createTempDir()).build(); final String path = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode" + extension).toString(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/TestsSSLService.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/TestsSSLService.java index 1d1dfe222c7..25dc0172618 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/TestsSSLService.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/TestsSSLService.java @@ -30,4 +30,8 @@ public class TestsSSLService extends SSLService { public SSLContext sslContext(Settings settings) { return sslContextHolder(super.sslConfiguration(settings)).sslContext(); } + + public SSLContext sslContext(String context) { + return sslContextHolder(super.getSSLConfiguration(context)).sslContext(); + } } diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 25f2511fbc0..096f6bf63be 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -193,7 +193,7 @@ public class TransportGraphExploreAction extends HandledTransportAction + client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"))); + } + + public void testDeleteJobAsync() throws Exception { + String jobId = "delete-job-async-job"; + String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; + createFarequoteJob(jobId); + + String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + assertThat(indicesBeforeDelete, containsString(indexName)); + + Response response = client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + + "?wait_for_completion=false")); + + // Wait for task to complete + String taskId = extractTaskId(response); + Response taskResponse = client().performRequest(new Request("GET", "_tasks/" + taskId + "?wait_for_completion=true")); + assertThat(EntityUtils.toString(taskResponse.getEntity()), containsString("\"acknowledged\":true")); + + // check that the index still exists (it's shared by default) + String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + assertThat(indicesAfterDelete, containsString(indexName)); + + waitUntilIndexIsEmpty(indexName); + + // check that the job itself is gone + expectThrows(ResponseException.class, () -> + client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"))); + } + + private void waitUntilIndexIsEmpty(String indexName) throws Exception { assertBusy(() -> { try { String count = EntityUtils.toString(client().performRequest(new Request("GET", indexName + "/_count")).getEntity()); @@ -394,10 +431,14 @@ public class MlJobIT extends ESRestTestCase { fail(e.getMessage()); } }); + } - // check that the job itself is gone - expectThrows(ResponseException.class, () -> - client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + 
"/_stats"))); + private static String extractTaskId(Response response) throws IOException { + String responseAsString = EntityUtils.toString(response.getEntity()); + Pattern matchTaskId = Pattern.compile(".*\"task\":.*\"(.*)\".*"); + Matcher taskIdMatcher = matchTaskId.matcher(responseAsString); + assertTrue(taskIdMatcher.matches()); + return taskIdMatcher.group(1); } public void testDeleteJobAfterMissingIndex() throws Exception { @@ -521,7 +562,7 @@ public class MlJobIT extends ESRestTestCase { } public void testDelete_multipleRequest() throws Exception { - String jobId = "delete-job-mulitple-times"; + String jobId = "delete-job-multiple-times"; createFarequoteJob(jobId); ConcurrentMapLong responses = ConcurrentCollections.newConcurrentMapLong(); @@ -532,8 +573,8 @@ public class MlJobIT extends ESRestTestCase { AtomicReference recreationException = new AtomicReference<>(); Runnable deleteJob = () -> { + boolean forceDelete = randomBoolean(); try { - boolean forceDelete = randomBoolean(); String url = MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId; if (forceDelete) { url += "?force=true"; @@ -554,6 +595,7 @@ public class MlJobIT extends ESRestTestCase { } catch (ResponseException re) { recreationException.set(re); } catch (IOException e) { + logger.error("Error trying to recreate the job", e); ioe.set(e); } } @@ -563,14 +605,14 @@ public class MlJobIT extends ESRestTestCase { // the other to complete. This is difficult to schedule but // hopefully it will happen in CI int numThreads = 5; - Thread [] threads = new Thread[numThreads]; - for (int i=0; i jobIdProcessor = id -> { validateJobAndTaskState(id, mlMetadata, tasksMetaData); Job job = mlMetadata.getJobs().get(id); - if (job.isDeleted()) { + if (job.isDeleting()) { return; } addJobAccordingToState(id, tasksMetaData, openJobIds, closingJobIds, failedJobs); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index 1d285b91f2f..89f42d62241 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -23,9 +23,9 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; +import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -34,9 +34,9 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import 
org.elasticsearch.index.query.IdsQueryBuilder; @@ -45,14 +45,13 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; -import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; @@ -72,10 +71,11 @@ import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; import java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeoutException; import java.util.function.Consumer; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; @@ -90,6 +90,14 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction>> listenersByJobId; + @Inject public TransportDeleteJobAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, @@ -101,6 +109,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction(); } @Override @@ -114,42 +123,8 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) { - - ActionListener markAsDeletingListener = ActionListener.wrap( - response -> { - if (request.isForce()) { - forceDeleteJob(request, listener); - } else { - normalDeleteJob(request, listener); - } - }, - e -> { - if (e instanceof MlMetadata.JobAlreadyMarkedAsDeletedException) { - // Don't kick off a parallel deletion task, but just wait for - // the in-progress request to finish. This is much safer in the - // case where the job with the same name might be immediately - // recreated after the delete returns. However, if a force - // delete times out then eventually kick off a parallel delete - // in case the original completely failed for some reason. 
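The replacement code in the hunks that follow drops this wait-and-retry logic in favor of deduplicating concurrent deletions: every caller's listener is parked in a per-job list, and only the first caller actually runs the expensive work. A compact sketch of that pattern under assumed names (`DedupRunner`, a `BiConsumer` standing in for `ActionListener`), independent of the Elasticsearch types:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;

// Hypothetical helper showing the "first caller runs, later callers piggyback" pattern.
final class DedupRunner<T> {
    private final Map<String, List<BiConsumer<T, Exception>>> listenersByKey = new HashMap<>();

    void run(String key, Consumer<BiConsumer<T, Exception>> action, BiConsumer<T, Exception> listener) {
        synchronized (listenersByKey) {
            List<BiConsumer<T, Exception>> listeners = listenersByKey.get(key);
            if (listeners != null) {
                listeners.add(listener); // work already in flight: just wait for its outcome
                return;
            }
            List<BiConsumer<T, Exception>> fresh = new ArrayList<>();
            fresh.add(listener);
            listenersByKey.put(key, fresh);
        }
        // Only the first caller reaches this point and triggers the actual work.
        action.accept((result, error) -> complete(key, result, error));
    }

    private void complete(String key, T result, Exception error) {
        List<BiConsumer<T, Exception>> listeners;
        synchronized (listenersByKey) {
            listeners = listenersByKey.remove(key);
        }
        if (listeners != null) {
            for (BiConsumer<T, Exception> l : listeners) {
                l.accept(result, error);
            }
        }
    }
}
```

This is safer than racing parallel deletions when a job with the same name might be recreated immediately after the delete returns, which is exactly the concern the removed comment describes.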
- waitForDeletingJob(request.getJobId(), MachineLearningField.STATE_PERSIST_RESTORE_TIMEOUT, - ActionListener.wrap( - listener::onResponse, - e2 -> { - if (request.isForce() && e2 instanceof TimeoutException) { - forceDeleteJob(request, listener); - } else { - listener.onFailure(e2); - } - } - )); - } else { - listener.onFailure(e); - } - }); - - markJobAsDeleting(request.getJobId(), markAsDeletingListener, request.isForce()); + protected ClusterBlockException checkBlock(DeleteJobAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } @Override @@ -158,13 +133,71 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) { + logger.debug("Deleting job '{}'", request.getJobId()); + + TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); + ParentTaskAssigningClient parentTaskClient = new ParentTaskAssigningClient(client, taskId); + + // Check if there is a deletion task for this job already and if yes wait for it to complete + synchronized (listenersByJobId) { + if (listenersByJobId.containsKey(request.getJobId())) { + logger.debug("[{}] Deletion task [{}] will wait for existing deletion task to complete", + request.getJobId(), task.getId()); + listenersByJobId.get(request.getJobId()).add(listener); + return; + } else { + List> listeners = new ArrayList<>(); + listeners.add(listener); + listenersByJobId.put(request.getJobId(), listeners); + } + } + + auditor.info(request.getJobId(), Messages.getMessage(Messages.JOB_AUDIT_DELETING, taskId)); + + // The listener that will be executed at the end of the chain will notify all listeners + ActionListener finalListener = ActionListener.wrap( + ack -> notifyListeners(request.getJobId(), ack, null), + e -> notifyListeners(request.getJobId(), null, e) + ); + + ActionListener markAsDeletingListener = ActionListener.wrap( + response -> { + if (request.isForce()) { + forceDeleteJob(parentTaskClient, request, finalListener); + } else { + normalDeleteJob(parentTaskClient, request, finalListener); + } + }, + e -> { + auditor.error(request.getJobId(), Messages.getMessage(Messages.JOB_AUDIT_DELETING_FAILED, e.getMessage())); + finalListener.onFailure(e); + }); + + markJobAsDeleting(request.getJobId(), markAsDeletingListener, request.isForce()); } - private void normalDeleteJob(DeleteJobAction.Request request, ActionListener listener) { + private void notifyListeners(String jobId, @Nullable AcknowledgedResponse ack, @Nullable Exception error) { + synchronized (listenersByJobId) { + List> listeners = listenersByJobId.remove(jobId); + if (listeners == null) { + logger.error("[{}] No deletion job listeners could be found", jobId); + return; + } + for (ActionListener listener : listeners) { + if (error != null) { + listener.onFailure(error); + } else { + listener.onResponse(ack); + } + } + } + } + + private void normalDeleteJob(ParentTaskAssigningClient parentTaskClient, DeleteJobAction.Request request, + ActionListener listener) { String jobId = request.getJobId(); - logger.debug("Deleting job '" + jobId + "'"); // Step 4. 
When the job has been removed from the cluster state, return a response // ------- @@ -212,10 +245,11 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction finishedHandler, Consumer failureHandler) { + private void deleteJobDocuments(ParentTaskAssigningClient parentTaskClient, String jobId, + CheckedConsumer finishedHandler, Consumer failureHandler) { final String indexName = AnomalyDetectorsIndex.getPhysicalIndexFromState(clusterService.state(), jobId); final String indexPattern = indexName + "-*"; @@ -241,7 +275,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeActionwrap( response -> deleteByQueryExecutor.onResponse(false), // skip DBQ && Alias failureHandler), - client.admin().indices()::delete); + parentTaskClient.admin().indices()::delete); } }, failure -> { @@ -312,7 +346,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction deleteQuantilesHandler = ActionListener.wrap( - response -> deleteCategorizerState(jobId, client, 1, deleteCategorizerStateHandler), + response -> deleteCategorizerState(parentTaskClient, jobId, 1, deleteCategorizerStateHandler), failureHandler); // Step 2. Delete state done, delete the quantiles ActionListener deleteStateHandler = ActionListener.wrap( - bulkResponse -> deleteQuantiles(jobId, client, deleteQuantilesHandler), + bulkResponse -> deleteQuantiles(parentTaskClient, jobId, deleteQuantilesHandler), failureHandler); // Step 1. Delete the model state - deleteModelState(jobId, client, deleteStateHandler); + deleteModelState(parentTaskClient, jobId, deleteStateHandler); } - private void deleteQuantiles(String jobId, Client client, ActionListener finishedHandler) { + private void deleteQuantiles(ParentTaskAssigningClient parentTaskClient, String jobId, ActionListener finishedHandler) { // The quantiles type and doc ID changed in v5.5 so delete both the old and new format DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName()); // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace @@ -344,7 +378,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction finishedHandler.onResponse(true), e -> { // It's not a problem for us if the index wasn't found - it's equivalent to document not found @@ -356,19 +390,20 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) { + private void deleteModelState(ParentTaskAssigningClient parentTaskClient, String jobId, ActionListener listener) { GetModelSnapshotsAction.Request request = new GetModelSnapshotsAction.Request(jobId, null); request.setPageParams(new PageParams(0, MAX_SNAPSHOTS_TO_DELETE)); - executeAsyncWithOrigin(client, ML_ORIGIN, GetModelSnapshotsAction.INSTANCE, request, ActionListener.wrap( + executeAsyncWithOrigin(parentTaskClient, ML_ORIGIN, GetModelSnapshotsAction.INSTANCE, request, ActionListener.wrap( response -> { List deleteCandidates = response.getPage().results(); - JobDataDeleter deleter = new JobDataDeleter(client, jobId); + JobDataDeleter deleter = new JobDataDeleter(parentTaskClient, jobId); deleter.deleteModelSnapshots(deleteCandidates, listener); }, listener::onFailure)); } - private void deleteCategorizerState(String jobId, Client client, int docNum, ActionListener finishedHandler) { + private void deleteCategorizerState(ParentTaskAssigningClient parentTaskClient, String jobId, int docNum, + ActionListener finishedHandler) { // The categorizer state type and doc ID changed in v5.5 so 
delete both the old and new format DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName()); // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace @@ -380,13 +415,13 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction { // If we successfully deleted a document try the next one; if not we're done if (response.getDeleted() > 0) { // There's an assumption here that there won't be very many categorizer // state documents, so the recursion won't go more than, say, 5 levels deep - deleteCategorizerState(jobId, client, docNum + 1, finishedHandler); + deleteCategorizerState(parentTaskClient, jobId, docNum + 1, finishedHandler); return; } finishedHandler.onResponse(true); @@ -401,14 +436,15 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction finishedHandler) { + private void deleteAliases(ParentTaskAssigningClient parentTaskClient, String jobId, + ActionListener finishedHandler) { final String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); final String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(jobId); // first find the concrete indices associated with the aliases GetAliasesRequest aliasesRequest = new GetAliasesRequest().aliases(readAliasName, writeAliasName) .indicesOptions(IndicesOptions.lenientExpandOpen()); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, aliasesRequest, + executeAsyncWithOrigin(parentTaskClient.threadPool().getThreadContext(), ML_ORIGIN, aliasesRequest, ActionListener.wrap( getAliasesResponse -> { // remove the aliases from the concrete indices found in the first step @@ -419,13 +455,13 @@ public class TransportDeleteJobAction extends TransportMasterNodeActionwrap( finishedHandler::onResponse, finishedHandler::onFailure), - client.admin().indices()::aliases); + parentTaskClient.admin().indices()::aliases); }, - finishedHandler::onFailure), client.admin().indices()::getAliases); + finishedHandler::onFailure), parentTaskClient.admin().indices()::getAliases); } private IndicesAliasesRequest buildRemoveAliasesRequest(GetAliasesResponse getAliasesResponse) { @@ -445,7 +481,10 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) { + private void forceDeleteJob(ParentTaskAssigningClient parentTaskClient, DeleteJobAction.Request request, + ActionListener listener) { + + logger.debug("Force deleting job [{}]", request.getJobId()); final ClusterState state = clusterService.state(); final String jobId = request.getJobId(); @@ -454,13 +493,13 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction removeTaskListener = new ActionListener() { @Override public void onResponse(Boolean response) { - normalDeleteJob(request, listener); + normalDeleteJob(parentTaskClient, request, listener); } @Override public void onFailure(Exception e) { if (e instanceof ResourceNotFoundException) { - normalDeleteJob(request, listener); + normalDeleteJob(parentTaskClient, request, listener); } else { listener.onFailure(e); } @@ -483,12 +522,13 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) { + private void killProcess(ParentTaskAssigningClient parentTaskClient, String jobId, + ActionListener listener) { KillProcessAction.Request killRequest = new KillProcessAction.Request(jobId); - executeAsyncWithOrigin(client, ML_ORIGIN, KillProcessAction.INSTANCE, killRequest, listener); + executeAsyncWithOrigin(parentTaskClient, 
ML_ORIGIN, KillProcessAction.INSTANCE, killRequest, listener); } private void removePersistentTask(String jobId, ClusterState currentState, @@ -520,7 +560,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) { - ClusterStateObserver stateObserver = new ClusterStateObserver(clusterService, timeout, logger, threadPool.getThreadContext()); - - ClusterState clusterState = stateObserver.setAndGetObservedState(); - if (jobIsDeletedFromState(jobId, clusterState)) { - listener.onResponse(new AcknowledgedResponse(true)); - } else { - stateObserver.waitForNextChange(new ClusterStateObserver.Listener() { - @Override - public void onNewClusterState(ClusterState state) { - listener.onResponse(new AcknowledgedResponse(true)); - } - - @Override - public void onClusterServiceClose() { - listener.onFailure(new NodeClosedException(clusterService.localNode())); - } - - @Override - public void onTimeout(TimeValue timeout) { - listener.onFailure(new TimeoutException("timed out after " + timeout)); - } - }, newClusterState -> jobIsDeletedFromState(jobId, newClusterState), timeout); - } - } - static boolean jobIsDeletedFromState(String jobId, ClusterState clusterState) { return !MlMetadata.getMlMetadata(clusterState).getJobs().containsKey(jobId); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index ab1ef73780e..7217fcc6ec9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -183,6 +183,6 @@ public class TransportGetJobsStatsAction extends TransportTasksAction stats) { Set excludeJobIds = stats.stream().map(GetJobsStatsAction.Response.JobStats::getJobId).collect(Collectors.toSet()); return requestedJobIds.stream().filter(jobId -> !excludeJobIds.contains(jobId) && - !mlMetadata.isJobDeleted(jobId)).collect(Collectors.toList()); + !mlMetadata.isJobDeleting(jobId)).collect(Collectors.toList()); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 512d8188abf..42b67b29173 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -127,8 +127,8 @@ public class TransportOpenJobAction extends TransportMasterNodeAction KEYWORD_MAX_LEN || length - str.replaceAll("\\s", "").length() > KEYWORD_MAX_SPACES; } + + /** + * Create an ingest pipeline definition appropriate for the file structure. + * @param grokPattern The Grok pattern used for parsing semi-structured text formats. null for + * fully structured formats. + * @param timestampField The input field containing the timestamp to be parsed into @timestamp. + * null if there is no timestamp. + * @param timestampFormats Timestamp formats to be used for parsing {@code timestampField}. + * May be null if {@code timestampField} is also null. + * @param needClientTimezone Is the timezone of the client supplying data to ingest required to uniquely parse the timestamp? + * @return The ingest pipeline definition, or null if none is required. 
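For a semi-structured text file, the definition this javadoc describes comes out as an ordered map holding a grok, a date, and a remove processor. A hand-built example of that shape follows; the grok pattern and the interim field name `timestamp` are hypothetical, and the real map is produced by the method below rather than assembled by callers.

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class IngestPipelineExample {
    public static void main(String[] args) {
        Map<String, Object> grok = new LinkedHashMap<>();
        grok.put("field", "message");
        grok.put("patterns", Collections.singletonList("%{TIMESTAMP_ISO8601:timestamp} %{GREEDYDATA:msg}"));

        Map<String, Object> date = new LinkedHashMap<>();
        date.put("field", "timestamp");
        date.put("timezone", "{{ beat.timezone }}"); // only added when the client timezone is needed
        date.put("formats", Arrays.asList("ISO8601"));

        List<Map<String, Object>> processors = new ArrayList<>();
        processors.add(Collections.singletonMap("grok", grok));
        processors.add(Collections.singletonMap("date", date));
        // the interim timestamp field is removed once @timestamp has been parsed
        processors.add(Collections.singletonMap("remove", Collections.singletonMap("field", "timestamp")));

        Map<String, Object> pipeline = new LinkedHashMap<>();
        pipeline.put("description", "Ingest pipeline created by file structure finder");
        pipeline.put("processors", processors);
        System.out.println(pipeline);
    }
}
```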
+ */ + public static Map makeIngestPipelineDefinition(String grokPattern, String timestampField, List timestampFormats, + boolean needClientTimezone) { + + if (grokPattern == null && timestampField == null) { + return null; + } + + Map pipeline = new LinkedHashMap<>(); + pipeline.put(Pipeline.DESCRIPTION_KEY, "Ingest pipeline created by file structure finder"); + + List> processors = new ArrayList<>(); + + if (grokPattern != null) { + Map grokProcessorSettings = new LinkedHashMap<>(); + grokProcessorSettings.put("field", "message"); + grokProcessorSettings.put("patterns", Collections.singletonList(grokPattern)); + processors.add(Collections.singletonMap("grok", grokProcessorSettings)); + } + + if (timestampField != null) { + Map dateProcessorSettings = new LinkedHashMap<>(); + dateProcessorSettings.put("field", timestampField); + if (needClientTimezone) { + dateProcessorSettings.put("timezone", "{{ " + BEAT_TIMEZONE_FIELD + " }}"); + } + dateProcessorSettings.put("formats", timestampFormats); + processors.add(Collections.singletonMap("date", dateProcessorSettings)); + } + + // This removes the interim timestamp field used for semi-structured text formats + if (grokPattern != null && timestampField != null) { + processors.add(Collections.singletonMap("remove", Collections.singletonMap("field", timestampField))); + } + + pipeline.put(Pipeline.PROCESSORS_KEY, processors); + return pipeline; + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java index 7263474505f..8d58ef4e5ca 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java @@ -56,10 +56,14 @@ public class JsonFileStructureFinder implements FileStructureFinder { Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides, timeoutChecker); if (timeField != null) { + boolean needClientTimeZone = timeField.v2().hasTimezoneDependentParsing(); + structureBuilder.setTimestampField(timeField.v1()) .setJodaTimestampFormats(timeField.v2().jodaTimestampFormats) .setJavaTimestampFormats(timeField.v2().javaTimestampFormats) - .setNeedClientTimezone(timeField.v2().hasTimezoneDependentParsing()); + .setNeedClientTimezone(needClientTimeZone) + .setIngestPipeline(FileStructureUtils.makeIngestPipelineDefinition(null, timeField.v1(), + timeField.v2().jodaTimestampFormats, needClientTimeZone)); } Tuple, SortedMap> mappingsAndFieldStats = diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java index 2d3072dda39..7578ca8f7fb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java @@ -113,12 +113,16 @@ public class TextLogFileStructureFinder implements FileStructureFinder { } } + boolean needClientTimeZone = bestTimestamp.v1().hasTimezoneDependentParsing(); + FileStructure structure = structureBuilder .setTimestampField(interimTimestampField) .setJodaTimestampFormats(bestTimestamp.v1().jodaTimestampFormats) 
.setJavaTimestampFormats(bestTimestamp.v1().javaTimestampFormats) - .setNeedClientTimezone(bestTimestamp.v1().hasTimezoneDependentParsing()) + .setNeedClientTimezone(needClientTimeZone) .setGrokPattern(grokPattern) + .setIngestPipeline(FileStructureUtils.makeIngestPipelineDefinition(grokPattern, interimTimestampField, + bestTimestamp.v1().jodaTimestampFormats, needClientTimeZone)) .setMappings(mappings) .setFieldStats(fieldStats) .setExplanation(explanation) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java index 1022d6d0ec0..4fe0c847c76 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java @@ -95,10 +95,14 @@ public class XmlFileStructureFinder implements FileStructureFinder { Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides, timeoutChecker); if (timeField != null) { + boolean needClientTimeZone = timeField.v2().hasTimezoneDependentParsing(); + structureBuilder.setTimestampField(timeField.v1()) .setJodaTimestampFormats(timeField.v2().jodaTimestampFormats) .setJavaTimestampFormats(timeField.v2().javaTimestampFormats) - .setNeedClientTimezone(timeField.v2().hasTimezoneDependentParsing()); + .setNeedClientTimezone(needClientTimeZone) + .setIngestPipeline(FileStructureUtils.makeIngestPipelineDefinition(null, topLevelTag + "." + timeField.v1(), + timeField.v2().jodaTimestampFormats, needClientTimeZone)); } Tuple, SortedMap> mappingsAndFieldStats = diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java index b1c73dc04db..3a76b71980b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java @@ -7,10 +7,15 @@ package org.elasticsearch.xpack.ml.rest.job; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskListener; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -37,6 +42,35 @@ public class RestDeleteJobAction extends BaseRestHandler { deleteJobRequest.setForce(restRequest.paramAsBoolean(CloseJobAction.Request.FORCE.getPreferredName(), deleteJobRequest.isForce())); deleteJobRequest.timeout(restRequest.paramAsTime("timeout", deleteJobRequest.timeout())); deleteJobRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", deleteJobRequest.masterNodeTimeout())); - return channel -> client.execute(DeleteJobAction.INSTANCE, deleteJobRequest, new RestToXContentListener<>(channel)); + + if 
(restRequest.paramAsBoolean("wait_for_completion", true)) { + return channel -> client.execute(DeleteJobAction.INSTANCE, deleteJobRequest, new RestToXContentListener<>(channel)); + } else { + deleteJobRequest.setShouldStoreResult(true); + + Task task = client.executeLocally(DeleteJobAction.INSTANCE, deleteJobRequest, nullTaskListener()); + // Send task description id instead of waiting for the message + return channel -> { + try (XContentBuilder builder = channel.newBuilder()) { + builder.startObject(); + builder.field("task", client.getLocalNodeId() + ":" + task.getId()); + builder.endObject(); + channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); + } + }; + } + } + + // We do not want to log anything due to a delete action + // The response or error will be returned to the client when called synchronously + // or it will be stored in the task result when called asynchronously + private static TaskListener nullTaskListener() { + return new TaskListener() { + @Override + public void onResponse(Task task, Object o) {} + + @Override + public void onFailure(Task task, Throwable e) {} + }; } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java index e16ac2f9970..82478fbf5d3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java @@ -124,7 +124,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { public void testRemoveJob() { Job.Builder jobBuilder = buildJobBuilder("1"); - jobBuilder.setDeleted(true); + jobBuilder.setDeleting(true); Job job1 = jobBuilder.build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); @@ -206,7 +206,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { } public void testPutDatafeed_failBecauseJobIsBeingDeleted() { - Job job1 = createDatafeedJob().setDeleted(true).build(new Date()); + Job job1 = createDatafeedJob().setDeleting(true).build(new Date()); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java index 7171f152186..60f08067a9b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java @@ -11,7 +11,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.ml.MachineLearningField; /** - * An extention to {@link ESSingleNodeTestCase} that adds node settings specifically needed for ML test cases. + * An extension to {@link ESSingleNodeTestCase} that adds node settings specifically needed for ML test cases. 
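The `wait_for_completion=false` branch above returns a task id immediately and leaves the deletion running; the caller is then expected to poll the tasks API, exactly as `testDeleteJobAsync` does earlier in this patch. A minimal client-side sketch of that flow with the low-level REST client, assuming a local node and a placeholder job name, with the ML path taken from the tests above:

```java
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class AsyncDeleteExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Kick off the delete without waiting; the response only carries a task id.
            Request delete = new Request("DELETE", "/_xpack/ml/anomaly_detectors/my-job");
            delete.addParameter("wait_for_completion", "false");
            String body = EntityUtils.toString(client.performRequest(delete).getEntity());

            // Pull the "task" field out of {"task":"<node>:<id>"} much like extractTaskId above.
            Matcher m = Pattern.compile("\"task\":\"([^\"]+)\"").matcher(body);
            if (m.find() == false) {
                throw new IllegalStateException("no task id in " + body);
            }

            // Block on the tasks API until the deletion has finished.
            Request wait = new Request("GET", "/_tasks/" + m.group(1));
            wait.addParameter("wait_for_completion", "true");
            Response done = client.performRequest(wait);
            System.out.println(EntityUtils.toString(done.getEntity()));
        }
    }
}
```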
*/ public abstract class MlSingleNodeTestCase extends ESSingleNodeTestCase { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java index 2e00ad71251..6d4b008570c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java @@ -28,7 +28,7 @@ public class TransportGetJobsStatsActionTests extends ESTestCase { public void testDetermineJobIds() { MlMetadata mlMetadata = mock(MlMetadata.class); - when(mlMetadata.isJobDeleted(eq("id4"))).thenReturn(true); + when(mlMetadata.isJobDeleting(eq("id4"))).thenReturn(true); List result = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata, Collections.singletonList("id1"), Collections.emptyList()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index 58b60273b0e..4dd41363b73 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -79,14 +79,14 @@ public class TransportOpenJobActionTests extends ESTestCase { expectThrows(ResourceNotFoundException.class, () -> TransportOpenJobAction.validate("job_id2", mlBuilder.build())); } - public void testValidate_jobMarkedAsDeleted() { + public void testValidate_jobMarkedAsDeleting() { MlMetadata.Builder mlBuilder = new MlMetadata.Builder(); Job.Builder jobBuilder = buildJobBuilder("job_id"); - jobBuilder.setDeleted(true); + jobBuilder.setDeleting(true); mlBuilder.putJob(jobBuilder.build(), false); Exception e = expectThrows(ElasticsearchStatusException.class, () -> TransportOpenJobAction.validate("job_id", mlBuilder.build())); - assertEquals("Cannot open job [job_id] because it has been marked as deleted", e.getMessage()); + assertEquals("Cannot open job [job_id] because it is being deleted", e.getMessage()); } public void testValidate_jobWithoutVersion() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFieldTests.java index 54afc2e707b..d2e13368d0d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFieldTests.java @@ -13,6 +13,7 @@ import org.joda.time.DateTime; import java.util.Arrays; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; public class ExtractedFieldTests extends ESTestCase { @@ -96,11 +97,32 @@ public class ExtractedFieldTests extends ESTestCase { } public void testValueGivenTimeField() { - SearchHit hit = new SearchHitBuilder(42).addField("time", new DateTime(123456789L)).build(); + final long millis = randomLong(); + final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new DateTime(millis)).build(); + final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + assertThat(timeField.value(hit), equalTo(new 
Object[] { millis })); + } - ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + public void testValueGivenStringTimeField() { + final long millis = randomLong(); + final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build(); + final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + assertThat(timeField.value(hit), equalTo(new Object[] { millis })); + } - assertThat(timeField.value(hit), equalTo(new Object[] { 123456789L })); + public void testValueGivenPre6xTimeField() { + // Prior to 6.x, timestamps were simply `long` milliseconds-past-the-epoch values + final long millis = randomLong(); + final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", millis).build(); + final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + assertThat(timeField.value(hit), equalTo(new Object[] { millis })); + } + + public void testValueGivenUnknownFormatTimeField() { + final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new Object()).build(); + final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + assertThat(expectThrows(IllegalStateException.class, () -> timeField.value(hit)).getMessage(), + startsWith("Unexpected value for a time field")); } public void testAliasVersusName() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFieldsTests.java index a921d1f3e73..7e98dd417cd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFieldsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFieldsTests.java @@ -64,11 +64,25 @@ public class ExtractedFieldsTests extends ESTestCase { } public void testTimeFieldValue() { - SearchHit hit = new SearchHitBuilder(1).addField("time", new DateTime(1000L)).build(); + final long millis = randomLong(); + final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new DateTime(millis)).build(); + final ExtractedFields extractedFields = new ExtractedFields(timeField, Collections.singletonList(timeField)); + assertThat(extractedFields.timeFieldValue(hit), equalTo(millis)); + } - ExtractedFields extractedFields = new ExtractedFields(timeField, Arrays.asList(timeField)); + public void testStringTimeFieldValue() { + final long millis = randomLong(); + final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build(); + final ExtractedFields extractedFields = new ExtractedFields(timeField, Collections.singletonList(timeField)); + assertThat(extractedFields.timeFieldValue(hit), equalTo(millis)); + } - assertThat(extractedFields.timeFieldValue(hit), equalTo(1000L)); + public void testPre6xTimeFieldValue() { + // Prior to 6.x, timestamps were simply `long` milliseconds-past-the-epoch values + final long millis = randomLong(); + final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", millis).build(); + final ExtractedFields extractedFields = new ExtractedFields(timeField, Collections.singletonList(timeField)); + assertThat(extractedFields.timeFieldValue(hit), equalTo(millis)); } public void testTimeFieldValueGivenEmptyArray() { diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java index c0e175f27b2..389a65da749 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java @@ -345,6 +345,75 @@ public class FileStructureUtilsTests extends FileStructureTestCase { assertNull(fieldStats.get("nothing")); } + public void testMakeIngestPipelineDefinitionGivenStructuredWithoutTimestamp() { + + assertNull(FileStructureUtils.makeIngestPipelineDefinition(null, null, null, false)); + } + + @SuppressWarnings("unchecked") + public void testMakeIngestPipelineDefinitionGivenStructuredWithTimestamp() { + + String timestampField = randomAlphaOfLength(10); + List timestampFormats = randomFrom(TimestampFormatFinder.ORDERED_CANDIDATE_FORMATS).jodaTimestampFormats; + boolean needClientTimezone = randomBoolean(); + + Map pipeline = + FileStructureUtils.makeIngestPipelineDefinition(null, timestampField, timestampFormats, needClientTimezone); + assertNotNull(pipeline); + + assertEquals("Ingest pipeline created by file structure finder", pipeline.remove("description")); + + List> processors = (List>) pipeline.remove("processors"); + assertNotNull(processors); + assertEquals(1, processors.size()); + + Map dateProcessor = (Map) processors.get(0).get("date"); + assertNotNull(dateProcessor); + assertEquals(timestampField, dateProcessor.get("field")); + assertEquals(needClientTimezone, dateProcessor.containsKey("timezone")); + assertEquals(timestampFormats, dateProcessor.get("formats")); + + // After removing the two expected fields there should be nothing left in the pipeline + assertEquals(Collections.emptyMap(), pipeline); + } + + @SuppressWarnings("unchecked") + public void testMakeIngestPipelineDefinitionGivenSemiStructured() { + + String grokPattern = randomAlphaOfLength(100); + String timestampField = randomAlphaOfLength(10); + List timestampFormats = randomFrom(TimestampFormatFinder.ORDERED_CANDIDATE_FORMATS).jodaTimestampFormats; + boolean needClientTimezone = randomBoolean(); + + Map pipeline = + FileStructureUtils.makeIngestPipelineDefinition(grokPattern, timestampField, timestampFormats, needClientTimezone); + assertNotNull(pipeline); + + assertEquals("Ingest pipeline created by file structure finder", pipeline.remove("description")); + + List> processors = (List>) pipeline.remove("processors"); + assertNotNull(processors); + assertEquals(3, processors.size()); + + Map grokProcessor = (Map) processors.get(0).get("grok"); + assertNotNull(grokProcessor); + assertEquals("message", grokProcessor.get("field")); + assertEquals(Collections.singletonList(grokPattern), grokProcessor.get("patterns")); + + Map dateProcessor = (Map) processors.get(1).get("date"); + assertNotNull(dateProcessor); + assertEquals(timestampField, dateProcessor.get("field")); + assertEquals(needClientTimezone, dateProcessor.containsKey("timezone")); + assertEquals(timestampFormats, dateProcessor.get("formats")); + + Map removeProcessor = (Map) processors.get(2).get("remove"); + assertNotNull(removeProcessor); + assertEquals(timestampField, dateProcessor.get("field")); + + // After removing the two expected fields there should be nothing left in the pipeline + assertEquals(Collections.emptyMap(), pipeline); + } + private Map 
<String, String> guessMapping(List<String> explanation, String fieldName, List<Object> fieldValues) { Tuple<Map<String, String>, FieldStats> mappingAndFieldStats = FileStructureUtils.guessMappingAndCalculateFieldStats(explanation, fieldName, fieldValues, NOOP_TIMEOUT_CHECKER); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java index ecc12a58d10..c86db02ca80 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java @@ -184,7 +184,7 @@ public class BasicDistributedJobsIT extends BaseMlIntegTestCase { @TestLogging("org.elasticsearch.xpack.persistent:TRACE,org.elasticsearch.cluster.service:DEBUG,org.elasticsearch.xpack.ml.action:DEBUG") public void testDedicatedMlNode() throws Exception { internalCluster().ensureAtMostNumDataNodes(0); - // start 2 non ml node that will never get a job allocated. (but ml apis are accessable from this node) + // start 2 non ml node that will never get a job allocated. (but ml apis are accessible from this node) internalCluster().startNode(Settings.builder().put(MachineLearning.ML_ENABLED.getKey(), false)); internalCluster().startNode(Settings.builder().put(MachineLearning.ML_ENABLED.getKey(), false)); // start ml node diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java deleted file mode 100644 index ed23a5328ae..00000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License.
- */ -package org.elasticsearch.xpack.ml.integration; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.core.ml.MlMetadata; -import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; -import org.elasticsearch.xpack.core.ml.action.PutJobAction; -import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - -public class DeleteJobIT extends BaseMlIntegTestCase { - - public void testWaitForDelete() throws ExecutionException, InterruptedException { - final String jobId = "wait-for-delete-job"; - Job.Builder job = createJob(jobId); - PutJobAction.Request putJobRequest = new PutJobAction.Request(job); - client().execute(PutJobAction.INSTANCE, putJobRequest).get(); - - AtomicReference exceptionHolder = new AtomicReference<>(); - CountDownLatch markAsDeletedLatch = new CountDownLatch(1); - clusterService().submitStateUpdateTask("mark-job-as-deleted", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) { - return markJobAsDeleted(jobId, currentState); - } - - @Override - public void onFailure(String source, Exception e) { - markAsDeletedLatch.countDown(); - exceptionHolder.set(e); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - markAsDeletedLatch.countDown(); - } - }); - - assertTrue("Timed out waiting for state update", markAsDeletedLatch.await(5, TimeUnit.SECONDS)); - assertNull("mark-job-as-deleted task failed: " + exceptionHolder.get(), exceptionHolder.get()); - - // Job is marked as deleting so now a delete request should wait for it. 
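The deleted test drives the async delete API with a hand-rolled listener plus latch plumbing. That wait-for-callback pattern in miniature, using plain `java.util.concurrent` types (the names here are illustrative, not Elasticsearch API):

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;

// Illustrative helper: block a test thread until an async callback fires, then rethrow failures.
final class BlockingCallback<T> implements BiConsumer<T, Exception> {
    private final CountDownLatch latch = new CountDownLatch(1);
    private final AtomicReference<T> result = new AtomicReference<>();
    private final AtomicReference<Exception> failure = new AtomicReference<>();

    @Override
    public void accept(T response, Exception error) {
        if (error != null) {
            failure.set(error);
        } else {
            result.set(response);
        }
        latch.countDown();
    }

    T get(long timeout, TimeUnit unit) throws Exception {
        if (latch.await(timeout, unit) == false) {
            throw new AssertionError("timed out waiting for callback");
        }
        if (failure.get() != null) {
            throw failure.get();
        }
        return result.get();
    }
}
```

With the cluster-state wait removed from `TransportDeleteJobAction`, this whole style of test becomes unnecessary, which is why the file is deleted rather than updated.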
- AtomicBoolean isDeleted = new AtomicBoolean(false); - AtomicReference deleteFailure = new AtomicReference<>(); - ActionListener deleteListener = new ActionListener() { - @Override - public void onResponse(AcknowledgedResponse response) { - isDeleted.compareAndSet(false, response.isAcknowledged()); - } - - @Override - public void onFailure(Exception e) { - deleteFailure.set(e); - } - }; - - client().execute(DeleteJobAction.INSTANCE, new DeleteJobAction.Request(jobId), deleteListener); - awaitBusy(isDeleted::get, 1, TimeUnit.SECONDS); - // still waiting - assertFalse(isDeleted.get()); - - CountDownLatch removeJobLatch = new CountDownLatch(1); - clusterService().submitStateUpdateTask("remove-job-from-state", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - assertFalse(isDeleted.get()); - return removeJobFromClusterState(jobId, currentState); - } - - @Override - public void onFailure(String source, Exception e) { - removeJobLatch.countDown(); - exceptionHolder.set(e); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - removeJobLatch.countDown(); - } - }); - - assertTrue("Timed out waiting for remove job from state response", removeJobLatch.await(5, TimeUnit.SECONDS)); - assertNull("remove-job-from-state task failed: " + exceptionHolder.get(), exceptionHolder.get()); - - assertNull("Job deletion failed: " + deleteFailure.get(), deleteFailure.get()); - assertTrue("Job was not deleted", isDeleted.get()); - } - - private ClusterState markJobAsDeleted(String jobId, ClusterState currentState) { - MlMetadata mlMetadata = MlMetadata.getMlMetadata(currentState); - assertNotNull(mlMetadata); - - MlMetadata.Builder builder = new MlMetadata.Builder(mlMetadata); - PersistentTasksCustomMetaData tasks = currentState.metaData().custom(PersistentTasksCustomMetaData.TYPE); - builder.markJobAsDeleted(jobId, tasks, true); - - ClusterState.Builder newState = ClusterState.builder(currentState); - return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()) - .build(); - } - - private ClusterState removeJobFromClusterState(String jobId, ClusterState currentState) { - MlMetadata.Builder builder = new MlMetadata.Builder(MlMetadata.getMlMetadata(currentState)); - builder.deleteJob(jobId, currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE)); - - ClusterState.Builder newState = ClusterState.builder(currentState); - return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()) - .build(); - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java index 807e94c2d90..78525f6ec2c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java @@ -36,9 +36,6 @@ public class JobBuilderTests extends AbstractSerializingTestCase { if (randomBoolean()) { builder.setFinishedTime(new Date(randomNonNegativeLong())); } - if (randomBoolean()) { - builder.setLastDataTime(new Date(randomNonNegativeLong())); - } if (randomBoolean()) { builder.setAnalysisConfig(AnalysisConfigTests.createRandomized()); } diff --git 
a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index d18286a9db5..31f403918fa 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -40,7 +40,7 @@ import org.elasticsearch.xpack.monitoring.action.TransportMonitoringBulkAction; import org.elasticsearch.xpack.monitoring.cleaner.CleanerService; import org.elasticsearch.xpack.monitoring.collector.Collector; import org.elasticsearch.xpack.monitoring.collector.ccr.CcrAutoFollowStatsCollector; -import org.elasticsearch.xpack.monitoring.collector.ccr.CcrStatsCollector; +import org.elasticsearch.xpack.monitoring.collector.ccr.FollowStatsCollector; import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsCollector; import org.elasticsearch.xpack.monitoring.collector.indices.IndexRecoveryCollector; import org.elasticsearch.xpack.monitoring.collector.indices.IndexStatsCollector; @@ -144,7 +144,7 @@ public class Monitoring extends Plugin implements ActionPlugin { collectors.add(new NodeStatsCollector(settings, clusterService, getLicenseState(), client)); collectors.add(new IndexRecoveryCollector(settings, clusterService, getLicenseState(), client)); collectors.add(new JobStatsCollector(settings, clusterService, getLicenseState(), client)); - collectors.add(new CcrStatsCollector(settings, clusterService, getLicenseState(), client)); + collectors.add(new FollowStatsCollector(settings, clusterService, getLicenseState(), client)); collectors.add(new CcrAutoFollowStatsCollector(settings, clusterService, getLicenseState(), client)); final MonitoringService monitoringService = new MonitoringService(settings, clusterService, threadPool, collectors, exporters); @@ -184,7 +184,7 @@ public class Monitoring extends Plugin implements ActionPlugin { settings.add(IndexRecoveryCollector.INDEX_RECOVERY_ACTIVE_ONLY); settings.add(IndexStatsCollector.INDEX_STATS_TIMEOUT); settings.add(JobStatsCollector.JOB_STATS_TIMEOUT); - settings.add(CcrStatsCollector.CCR_STATS_TIMEOUT); + settings.add(FollowStatsCollector.CCR_STATS_TIMEOUT); settings.add(CcrAutoFollowStatsCollector.CCR_AUTO_FOLLOW_STATS_TIMEOUT); settings.add(NodeStatsCollector.NODE_STATS_TIMEOUT); settings.addAll(Exporters.getSettings()); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java index f6b124d6df5..e3db3d33667 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java @@ -22,7 +22,7 @@ import java.util.Collection; import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; -import static org.elasticsearch.xpack.monitoring.collector.ccr.CcrStatsMonitoringDoc.TYPE; +import static org.elasticsearch.xpack.monitoring.collector.ccr.FollowStatsMonitoringDoc.TYPE; public abstract class AbstractCcrCollector extends Collector { diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java 
b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsCollector.java similarity index 79% rename from x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java rename to x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsCollector.java index e9f3d09ef43..3255032e785 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsCollector.java @@ -14,18 +14,18 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.XPackClient; -import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import org.elasticsearch.xpack.core.ccr.client.CcrClient; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; import java.util.Collection; import java.util.stream.Collectors; -public final class CcrStatsCollector extends AbstractCcrCollector { +public final class FollowStatsCollector extends AbstractCcrCollector { public static final Setting CCR_STATS_TIMEOUT = collectionTimeoutSetting("ccr.stats.timeout"); - public CcrStatsCollector( + public FollowStatsCollector( final Settings settings, final ClusterService clusterService, final XPackLicenseState licenseState, @@ -34,7 +34,7 @@ public final class CcrStatsCollector extends AbstractCcrCollector { client.threadPool().getThreadContext()); } - CcrStatsCollector( + FollowStatsCollector( final Settings settings, final ClusterService clusterService, final XPackLicenseState licenseState, @@ -51,14 +51,14 @@ public final class CcrStatsCollector extends AbstractCcrCollector { MonitoringDoc.Node node) throws Exception { - final CcrStatsAction.StatsRequest request = new CcrStatsAction.StatsRequest(); + final FollowStatsAction.StatsRequest request = new FollowStatsAction.StatsRequest(); request.setIndices(getCollectionIndices()); - final CcrStatsAction.StatsResponses responses = ccrClient.stats(request).actionGet(getCollectionTimeout()); + final FollowStatsAction.StatsResponses responses = ccrClient.stats(request).actionGet(getCollectionTimeout()); return responses .getStatsResponses() .stream() - .map(stats -> new CcrStatsMonitoringDoc(clusterUuid, timestamp, interval, node, stats.status())) + .map(stats -> new FollowStatsMonitoringDoc(clusterUuid, timestamp, interval, node, stats.status())) .collect(Collectors.toList()); } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDoc.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDoc.java similarity index 93% rename from x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDoc.java rename to x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDoc.java index 45c6a8607d4..53ceb48ad3d 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDoc.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDoc.java @@ -14,7 
+14,7 @@ import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; import java.io.IOException; import java.util.Objects; -public class CcrStatsMonitoringDoc extends MonitoringDoc { +public class FollowStatsMonitoringDoc extends MonitoringDoc { public static final String TYPE = "ccr_stats"; @@ -24,7 +24,7 @@ public class CcrStatsMonitoringDoc extends MonitoringDoc { return status; } - public CcrStatsMonitoringDoc( + public FollowStatsMonitoringDoc( final String cluster, final long timestamp, final long intervalMillis, diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java index 4a4b53575b2..d16f47b1a35 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java @@ -262,7 +262,7 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE updatePersistentTaskState(state, ActionListener.wrap( (task) -> { - logger.debug("Succesfully updated state for rollup job [" + job.getConfig().getId() + "] to [" + logger.debug("Successfully updated state for rollup job [" + job.getConfig().getId() + "] to [" + state.getIndexerState() + "][" + state.getPosition() + "]"); listener.onResponse(new StartRollupJobAction.Response(true)); }, @@ -308,7 +308,7 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE updatePersistentTaskState(state, ActionListener.wrap( (task) -> { - logger.debug("Succesfully updated state for rollup job [" + job.getConfig().getId() + logger.debug("Successfully updated state for rollup job [" + job.getConfig().getId() + "] to [" + state.getIndexerState() + "]"); listener.onResponse(new StopRollupJobAction.Response(true)); }, diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java index a47d057b5d5..59073e763c2 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java @@ -202,7 +202,7 @@ public class RollupJobTaskTests extends ESTestCase { } else if (c == 1) { assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STOPPED)); } else { - fail("Should not have updated persistent statuse > 2 times"); + fail("Should not have updated persistent statuses > 2 times"); } listener.onResponse(new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, new PersistentTasksCustomMetaData.Assignment("foo", "foo"))); @@ -688,7 +688,7 @@ public class RollupJobTaskTests extends ESTestCase { } else if (c == 2) { assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STOPPED)); } else { - fail("Should not have updated persistent statuse > 3 times"); + fail("Should not have updated persistent statuses > 3 times"); } listener.onResponse(new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, new PersistentTasksCustomMetaData.Assignment("foo", "foo"))); diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 74241be4a91..d935a31b1a5 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -23,7 +23,7 
@@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') - compile 'com.unboundid:unboundid-ldapsdk:3.2.0' + compile 'com.unboundid:unboundid-ldapsdk:4.0.8' compileOnly 'org.bouncycastle:bcprov-jdk15on:1.59' compileOnly 'org.bouncycastle:bcpkix-jdk15on:1.59' diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 76b1a87f682..2a49a129994 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -307,7 +307,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw new FIPS140LicenseBootstrapCheck())); checks.addAll(InternalRealms.getBootstrapChecks(settings, env)); this.bootstrapChecks = Collections.unmodifiableList(checks); - Automatons.updateMaxDeterminizedStates(settings); + Automatons.updateConfiguration(settings); } else { this.bootstrapChecks = Collections.emptyList(); } @@ -609,7 +609,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw ReservedRealm.addSettings(settingsList); AuthenticationService.addSettings(settingsList); AuthorizationService.addSettings(settingsList); - settingsList.add(Automatons.MAX_DETERMINIZED_STATES_SETTING); + Automatons.addSettings(settingsList); settingsList.add(CompositeRolesStore.CACHE_SIZE_SETTING); settingsList.add(FieldPermissionsCache.CACHE_SIZE_SETTING); settingsList.add(TokenService.TOKEN_EXPIRATION); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java index 9dd18be510f..13fbe248bdc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction; import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateRequest; import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.saml.SamlRealm; @@ -54,7 +55,12 @@ public final class TransportSamlAuthenticateAction extends HandledTransportActio Authentication originatingAuthentication = Authentication.getAuthentication(threadContext); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { authenticationService.authenticate(SamlAuthenticateAction.NAME, request, saml, ActionListener.wrap(authentication -> { - final Map tokenMeta = threadContext.getTransient(SamlRealm.CONTEXT_TOKEN_DATA); + AuthenticationResult result = threadContext.getTransient(AuthenticationResult.THREAD_CONTEXT_KEY); + if (result == null) { + listener.onFailure(new IllegalStateException("Cannot find AuthenticationResult on thread context")); + return; + } + final Map tokenMeta = (Map) 
result.getMetadata().get(SamlRealm.CONTEXT_TOKEN_DATA); tokenService.createUserToken(authentication, originatingAuthentication, ActionListener.wrap(tuple -> { final String tokenString = tokenService.getUserTokenString(tuple.v1()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index c3888ba9453..d5242fab45f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; @@ -139,6 +140,7 @@ public class AuthenticationService extends AbstractComponent { private RealmRef authenticatedBy = null; private RealmRef lookedupBy = null; private AuthenticationToken authenticationToken = null; + private AuthenticationResult authenticationResult = null; Authenticator(RestRequest request, ActionListener listener) { this(new AuditableRestRequest(auditTrail, failureHandler, threadContext, request), null, listener); @@ -266,6 +268,7 @@ public class AuthenticationService extends AbstractComponent { if (result.getStatus() == AuthenticationResult.Status.SUCCESS) { // user was authenticated, populate the authenticated by information authenticatedBy = new RealmRef(realm.name(), realm.type(), nodeName); + authenticationResult = result; userListener.onResponse(result.getUser()); } else { // the user was not authenticated, call this so we can audit the correct event @@ -294,9 +297,9 @@ public class AuthenticationService extends AbstractComponent { } }; final IteratingActionListener authenticatingListener = - new IteratingActionListener<>(ActionListener.wrap( - (user) -> consumeUser(user, messages), - (e) -> listener.onFailure(request.exceptionProcessingRequest(e, token))), + new IteratingActionListener<>(ContextPreservingActionListener.wrapPreservingContext(ActionListener.wrap( + (user) -> consumeUser(user, messages), + (e) -> listener.onFailure(request.exceptionProcessingRequest(e, token))), threadContext), realmAuthenticatingConsumer, realmsList, threadContext); try { authenticatingListener.run(); @@ -359,6 +362,7 @@ public class AuthenticationService extends AbstractComponent { }); listener.onFailure(request.authenticationFailed(authenticationToken)); } else { + threadContext.putTransient(AuthenticationResult.THREAD_CONTEXT_KEY, authenticationResult); if (runAsEnabled) { final String runAsUsername = threadContext.getHeader(AuthenticationServiceField.RUN_AS_USER_HEADER); if (runAsUsername != null && runAsUsername.isEmpty() == false) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java index f28fe1c297f..3f645eab78c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java @@ -9,8 +9,8 @@ import joptsimple.OptionParser; import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.Appender; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.LoggerContext; @@ -26,7 +26,6 @@ import org.elasticsearch.cli.Terminal.Verbosity; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -360,7 +359,7 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand { * Creates a new Logger that is detached from the ROOT logger and only has an appender that will output log messages to the terminal */ static Logger getTerminalLogger(final Terminal terminal) { - final Logger logger = ESLoggerFactory.getLogger(ESNativeRealmMigrateTool.class); + final Logger logger = LogManager.getLogger(ESNativeRealmMigrateTool.class); Loggers.setLevel(logger, Level.ALL); final LoggerContext ctx = (LoggerContext) LogManager.getContext(false); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java index 8482e6f090c..12a1a3a8f24 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java @@ -7,10 +7,10 @@ package org.elasticsearch.xpack.security.authc.kerberos; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.ietf.jgss.GSSContext; import org.ietf.jgss.GSSCredential; import org.ietf.jgss.GSSException; @@ -53,7 +53,7 @@ public class KerberosTicketValidator { return oid; } - private static final Logger LOGGER = ESLoggerFactory.getLogger(KerberosTicketValidator.class); + private static final Logger LOGGER = LogManager.getLogger(KerberosTicketValidator.class); private static final String KEY_TAB_CONF_NAME = "KeytabConf"; private static final String SUN_KRB5_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule"; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java index 70b2f0015cf..464a0cfc61c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java @@ -115,7 +115,7 @@ public class LdapSessionFactory extends SessionFactory { * Securely escapes the username and inserts it into the template using 
MessageFormat * * @param username username to insert into the DN template. Any commas, equals or plus will be escaped. - * @return DN (distinquished name) build from the template. + * @return DN (distinguished name) built from the template. */ String buildDnFromTemplate(String username, String template) { //this value must be escaped to avoid manipulation of the template DN. diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java index 90cecd1e48a..d2d87db683c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java @@ -23,6 +23,7 @@ import com.unboundid.ldap.sdk.SearchResultEntry; import com.unboundid.ldap.sdk.SearchResultReference; import com.unboundid.ldap.sdk.SearchScope; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.core.internal.io.IOUtils; @@ -32,7 +33,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.threadpool.ThreadPool; @@ -55,7 +55,7 @@ public final class LdapUtils { public static final Filter OBJECT_CLASS_PRESENCE_FILTER = Filter.createPresenceFilter("objectClass"); - private static final Logger LOGGER = ESLoggerFactory.getLogger(LdapUtils.class); + private static final Logger LOGGER = LogManager.getLogger(LdapUtils.class); private LdapUtils() { } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java index 36ad208df2b..7c982e6b1b3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java @@ -426,7 +426,10 @@ public final class SamlRealm extends Realm implements Releasable { final Map tokenMetadata = createTokenMetadata(attributes.name(), attributes.session()); ActionListener wrappedListener = ActionListener.wrap(auth -> { if (auth.isAuthenticated()) { - config.threadContext().putTransient(CONTEXT_TOKEN_DATA, tokenMetadata); + // Add the SAML token details as metadata on the authentication + Map metadata = new HashMap<>(auth.getMetadata()); + metadata.put(CONTEXT_TOKEN_DATA, tokenMetadata); + auth = AuthenticationResult.success(auth.getUser(), metadata); } baseListener.onResponse(auth); }, baseListener::onFailure); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlUtils.java index 9b8d73c2e61..8451f3553df 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlUtils.java +++
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlUtils.java @@ -37,12 +37,12 @@ import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.hash.MessageDigests; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.xpack.security.support.RestorableContextClassLoader; import org.opensaml.core.config.InitializationService; import org.opensaml.core.xml.XMLObject; @@ -71,7 +71,7 @@ public class SamlUtils { private static final SecureRandom SECURE_RANDOM = new SecureRandom(); private static XMLObjectBuilderFactory builderFactory = null; - private static final Logger LOGGER = ESLoggerFactory.getLogger(SamlUtils.class); + private static final Logger LOGGER = LogManager.getLogger(SamlUtils.class); /** * This is needed in order to initialize the underlying OpenSAML library. diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java index af93a180072..fdb2fd0f33d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ListenableFuture; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; @@ -30,6 +31,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm private final Cache> cache; private final ThreadPool threadPool; + private final boolean authenticationEnabled; final Hasher cacheHasher; protected CachingUsernamePasswordRealm(String type, RealmConfig config, ThreadPool threadPool) { @@ -45,6 +47,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm } else { cache = null; } + this.authenticationEnabled = CachingUsernamePasswordRealmSettings.AUTHC_ENABLED_SETTING.get(config.settings()); } @Override @@ -63,15 +66,34 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm } } + @Override + public UsernamePasswordToken token(ThreadContext threadContext) { + if (authenticationEnabled == false) { + return null; + } + return super.token(threadContext); + } + + @Override + public boolean supports(AuthenticationToken token) { + return authenticationEnabled && super.supports(token); + } + /** * If the user exists in the cache (keyed by the principle name), then the password is validated * against a hash also stored in the cache. Otherwise the subclass authenticates the user via - * doAuthenticate + * doAuthenticate. 
+ * This method will respond with {@link AuthenticationResult#notHandled()} if + * {@link CachingUsernamePasswordRealmSettings#AUTHC_ENABLED_SETTING authentication is not enabled}. * @param authToken The authentication token * @param listener to be called at completion */ @Override public final void authenticate(AuthenticationToken authToken, ActionListener listener) { + if (authenticationEnabled == false) { + listener.onResponse(AuthenticationResult.notHandled()); + return; + } final UsernamePasswordToken token = (UsernamePasswordToken) authToken; try { if (cache == null) { @@ -131,7 +153,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm // is cleared of the failed authentication cache.invalidate(token.principal(), listenableCacheEntry); authenticateWithCache(token, listener); - }), threadPool.executor(ThreadPool.Names.GENERIC)); + }), threadPool.executor(ThreadPool.Names.GENERIC), threadPool.getThreadContext()); } else { // attempt authentication against the authentication source doAuthenticate(token, ActionListener.wrap(authResult -> { @@ -233,7 +255,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm } else { listener.onResponse(null); } - }, listener::onFailure), threadPool.executor(ThreadPool.Names.GENERIC)); + }, listener::onFailure), threadPool.executor(ThreadPool.Names.GENERIC), threadPool.getThreadContext()); } catch (final ExecutionException e) { listener.onFailure(e); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java index 8a02977d55c..bdabc690f76 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java @@ -8,14 +8,17 @@ package org.elasticsearch.xpack.security.authc.support; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.stream.Collectors; import com.unboundid.ldap.sdk.DN; import com.unboundid.ldap.sdk.LDAPException; @@ -51,7 +54,7 @@ public class DnRoleMapper implements UserRoleMapper { private final Path file; private final boolean useUnmappedGroupsAsRoles; private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); - private volatile Map> dnRoles; + private volatile Map> dnRoles; public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) { this.config = config; @@ -87,7 +90,7 @@ public class DnRoleMapper implements UserRoleMapper { * logging the error and skipping/removing all mappings. This is aligned with how we handle other auto-loaded files * in security. 
*/ - public static Map> parseFileLenient(Path path, Logger logger, String realmType, String realmName) { + public static Map> parseFileLenient(Path path, Logger logger, String realmType, String realmName) { try { return parseFile(path, logger, realmType, realmName, false); } catch (Exception e) { @@ -98,7 +101,7 @@ public class DnRoleMapper implements UserRoleMapper { } } - public static Map> parseFile(Path path, Logger logger, String realmType, String realmName, boolean strict) { + public static Map> parseFile(Path path, Logger logger, String realmType, String realmName, boolean strict) { logger.trace("reading realm [{}/{}] role mappings file [{}]...", realmType, realmName, path.toAbsolutePath()); @@ -149,7 +152,10 @@ public class DnRoleMapper implements UserRoleMapper { logger.debug("[{}] role mappings found in file [{}] for realm [{}/{}]", dnToRoles.size(), path.toAbsolutePath(), realmType, realmName); - return unmodifiableMap(dnToRoles); + Map> normalizedMap = dnToRoles.entrySet().stream().collect(Collectors.toMap( + entry -> entry.getKey().toNormalizedString(), + entry -> Collections.unmodifiableList(new ArrayList<>(entry.getValue())))); + return unmodifiableMap(normalizedMap); } catch (IOException | SettingsException e) { throw new ElasticsearchException("could not read realm [" + realmType + "/" + realmName + "] role mappings file [" + path.toAbsolutePath() + "]", e); @@ -176,8 +182,9 @@ public class DnRoleMapper implements UserRoleMapper { Set roles = new HashSet<>(); for (String groupDnString : groupDns) { DN groupDn = dn(groupDnString); - if (dnRoles.containsKey(groupDn)) { - roles.addAll(dnRoles.get(groupDn)); + String normalizedGroupDn = groupDn.toNormalizedString(); + if (dnRoles.containsKey(normalizedGroupDn)) { + roles.addAll(dnRoles.get(normalizedGroupDn)); } else if (useUnmappedGroupsAsRoles) { roles.add(relativeName(groupDn)); } @@ -187,14 +194,14 @@ public class DnRoleMapper implements UserRoleMapper { groupDns, file.getFileName(), config.type(), config.name()); } - DN userDn = dn(userDnString); - Set rolesMappedToUserDn = dnRoles.get(userDn); + String normalizedUserDn = dn(userDnString).toNormalizedString(); + List rolesMappedToUserDn = dnRoles.get(normalizedUserDn); if (rolesMappedToUserDn != null) { roles.addAll(rolesMappedToUserDn); } if (logger.isDebugEnabled()) { logger.debug("the roles [{}], are mapped from the user [{}] using file [{}] for realm [{}/{}]", - (rolesMappedToUserDn == null) ? Collections.emptySet() : rolesMappedToUserDn, userDnString, file.getFileName(), + (rolesMappedToUserDn == null) ? Collections.emptySet() : rolesMappedToUserDn, normalizedUserDn, file.getFileName(), config.type(), config.name()); } return roles; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java index 8c60e565e68..2c728fa002c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java @@ -151,7 +151,7 @@ public interface UserRoleMapper { * {@link ExpressionModel} class can take a custom {@code Predicate} that tests whether the data in the model * matches the {@link FieldExpression.FieldValue value} in the expression. 
* - * The string constructor parameter may or may not actaully parse as a DN - the "dn" field should + * The string constructor parameter may or may not actually parse as a DN - the "dn" field should * always be a DN, however groups will be a DN if they're from an LDAP/AD realm, but often won't be for a SAML realm. * * Because the {@link FieldExpression.FieldValue} might be a pattern ({@link CharacterRunAutomaton automaton}), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 642bc167f7d..f9fe2b7eaa7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -568,9 +568,12 @@ public class AuthorizationService extends AbstractComponent { } // check for run as if (authentication.getUser().isRunAs()) { + logger.debug("action [{}] is unauthorized for user [{}] run as [{}]", action, authUser.principal(), + authentication.getUser().principal()); return authorizationError("action [{}] is unauthorized for user [{}] run as [{}]", action, authUser.principal(), authentication.getUser().principal()); } + logger.debug("action [{}] is unauthorized for user [{}]", action, authUser.principal()); return authorizationError("action [{}] is unauthorized for user [{}]", action, authUser.principal()); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 34aed55bb29..cac9baf1512 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -275,26 +275,39 @@ class IndicesAndAliasesResolver { } private List replaceWildcardsWithAuthorizedAliases(String[] aliases, List authorizedAliases) { - List finalAliases = new ArrayList<>(); + final List finalAliases = new ArrayList<>(); - //IndicesAliasesRequest doesn't support empty aliases (validation fails) but GetAliasesRequest does (in which case empty means _all) + // IndicesAliasesRequest doesn't support empty aliases (validation fails) but + // GetAliasesRequest does (in which case empty means _all) if (aliases.length == 0) { finalAliases.addAll(authorizedAliases); } - for (String aliasPattern : aliases) { - if (aliasPattern.equals(MetaData.ALL)) { - finalAliases.addAll(authorizedAliases); - } else if (Regex.isSimpleMatchPattern(aliasPattern)) { - for (String authorizedAlias : authorizedAliases) { - if (Regex.simpleMatch(aliasPattern, authorizedAlias)) { - finalAliases.add(authorizedAlias); + for (String aliasExpression : aliases) { + boolean include = true; + if (aliasExpression.charAt(0) == '-') { + include = false; + aliasExpression = aliasExpression.substring(1); + } + if (MetaData.ALL.equals(aliasExpression) || Regex.isSimpleMatchPattern(aliasExpression)) { + final Set resolvedAliases = new HashSet<>(); + for (final String authorizedAlias : authorizedAliases) { + if (MetaData.ALL.equals(aliasExpression) || Regex.simpleMatch(aliasExpression, authorizedAlias)) { + resolvedAliases.add(authorizedAlias); } } + if (include) { + finalAliases.addAll(resolvedAliases); + } else { + 
finalAliases.removeAll(resolvedAliases); + } + } else if (include) { + finalAliases.add(aliasExpression); } else { - finalAliases.add(aliasPattern); + finalAliases.remove(aliasExpression); } } + return finalAliases; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java index 7b14f218c43..a35d6a47ce3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.security.rest; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.license.XPackLicenseState; @@ -27,7 +27,7 @@ import java.io.IOException; public class SecurityRestFilter implements RestHandler { - private static final Logger logger = ESLoggerFactory.getLogger(SecurityRestFilter.class); + private static final Logger logger = LogManager.getLogger(SecurityRestFilter.class); private final RestHandler restHandler; private final AuthenticationService service; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java index fcbae00ba09..d59064619fc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java @@ -53,7 +53,7 @@ public interface ServerTransportFilter { throws IOException; /** - * The server trasnport filter that should be used in nodes as it ensures that an incoming + * The server transport filter that should be used in nodes as it ensures that an incoming * request is properly authenticated and authorized */ class NodeProfile implements ServerTransportFilter { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java index 2d891afea8c..95d821cb256 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.not; /** * This test ensures, that the plugin template upgrader can add and remove * templates when started within security, as this requires certain - * system priviliges + * system privileges */ @ClusterScope(maxNumDataNodes = 1, scope = Scope.SUITE, numClientNodes = 0) public class TemplateUpgraderTests extends SecurityIntegTestCase { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 65f69b397ba..ef5b0386bc2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -716,7 +716,7 @@ public class AuthenticationServiceTests extends ESTestCase { when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); mockRealmLookupReturnsNull(firstRealm, "run_as"); - doThrow(authenticationError("realm doesn't want to " + "lookup")) + doThrow(authenticationError("realm doesn't want to lookup")) .when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class)); try { @@ -1029,12 +1029,22 @@ public class AuthenticationServiceTests extends ESTestCase { @SuppressWarnings("unchecked") private void mockAuthenticate(Realm realm, AuthenticationToken token, User user) { - doAnswer((i) -> { + final boolean separateThread = randomBoolean(); + doAnswer(i -> { ActionListener listener = (ActionListener) i.getArguments()[1]; - if (user == null) { - listener.onResponse(AuthenticationResult.notHandled()); + Runnable run = () -> { + if (user == null) { + listener.onResponse(AuthenticationResult.notHandled()); + } else { + listener.onResponse(AuthenticationResult.success(user)); + } + }; + if (separateThread) { + final Thread thread = new Thread(run); + thread.start(); + thread.join(); } else { - listener.onResponse(AuthenticationResult.success(user)); + run.run(); } return null; }).when(realm).authenticate(eq(token), any(ActionListener.class)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetaDataResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetaDataResolverTests.java index bddfd3f4bcf..74502b4e2b3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetaDataResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetaDataResolverTests.java @@ -40,7 +40,7 @@ public class LdapMetaDataResolverTests extends ESTestCase { new Attribute("cn", "Clint Barton"), new Attribute("uid", "hawkeye"), new Attribute("email", "clint.barton@shield.gov"), - new Attribute("memberOf", "cn=staff,ou=groups,dc=exmaple,dc=com", "cn=admin,ou=groups,dc=exmaple,dc=com") + new Attribute("memberOf", "cn=staff,ou=groups,dc=example,dc=com", "cn=admin,ou=groups,dc=example,dc=com") ); final Map map = resolve(attributes); assertThat(map.size(), equalTo(2)); @@ -75,4 +75,4 @@ public class LdapMetaDataResolverTests extends ESTestCase { resolver.resolve(null, HAWKEYE_DN, TimeValue.timeValueSeconds(1), logger, attributes, future); return future.get(); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java index 1a9a5b4d56e..f8bfa241736 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.security.authc.ldap.support; import com.unboundid.ldap.listener.InMemoryDirectoryServer; import com.unboundid.ldap.sdk.LDAPConnection; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -17,6 +18,7 @@ import org.elasticsearch.mocksocket.MockSocket; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.common.socket.SocketAccess; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -25,6 +27,7 @@ import org.junit.Before; import java.io.IOException; import java.net.InetAddress; +import java.net.InetSocketAddress; import java.net.Socket; import java.util.ArrayList; import java.util.Arrays; @@ -112,7 +115,7 @@ public class SessionFactoryLoadBalancingTests extends LdapTestCase { // of the ldap server and the opening of the socket logger.debug("opening mock server socket listening on [{}]", port); Runnable runnable = () -> { - try (Socket socket = new MockSocket(InetAddress.getByName("localhost"), mockServerSocket.getLocalPort(), local, port)) { + try (Socket socket = openMockSocket(local, mockServerSocket.getLocalPort(), local, port)) { logger.debug("opened socket [{}]", socket); latch.countDown(); closeLatch.await(); @@ -149,6 +152,17 @@ public class SessionFactoryLoadBalancingTests extends LdapTestCase { } } + @SuppressForbidden(reason = "Allow opening socket for test") + private MockSocket openMockSocket(InetAddress remoteAddress, int remotePort, InetAddress localAddress, int localPort) + throws IOException { + final MockSocket socket = new MockSocket(); + socket.setReuseAddress(true); // allow binding even if the previous socket is in timed wait state. + socket.setSoLinger(true, 0); // close immediately as we are not writing anything here. + socket.bind(new InetSocketAddress(localAddress, localPort)); + SocketAccess.doPrivileged(() -> socket.connect(new InetSocketAddress(localAddress, remotePort))); + return socket; + } + public void testFailover() throws Exception { assumeTrue("at least one ldap server should be present for this test", ldapServers.length > 1); logger.debug("using [{}] ldap servers, urls {}", ldapServers.length, ldapUrls()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java index 7ae41de900e..22071d6010d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java @@ -1636,7 +1636,7 @@ public class SamlAuthenticatorTests extends SamlTestCase { /* Permutation 7 - Mangle the contents of the response to be - + @@ -1645,16 +1645,16 @@ public class SamlAuthenticatorTests extends SamlTestCase { */ final Element response = (Element) legitimateDocument. 
getElementsByTagNameNS(SAML20P_NS, "Response").item(0); - final Element extentions = legitimateDocument.createElement("Extensions"); + final Element extensions = legitimateDocument.createElement("Extensions"); final Element assertion = (Element) legitimateDocument. getElementsByTagNameNS(SAML20_NS, "Assertion").item(0); - response.insertBefore(extentions, assertion); + response.insertBefore(extensions, assertion); final Element forgedAssertion = (Element) assertion.cloneNode(true); forgedAssertion.setAttribute("ID", "_forged_assertion_id"); final Element forgedSignature = (Element) forgedAssertion. getElementsByTagNameNS("http://www.w3.org/2000/09/xmldsig#", "Signature").item(0); forgedAssertion.removeChild(forgedSignature); - extentions.appendChild(forgedAssertion); + extensions.appendChild(forgedAssertion); final SamlToken forgedToken = token(SamlUtils.toString((legitimateDocument.getDocumentElement()))); final ElasticsearchSecurityException exception = expectSamlException(() -> authenticator.authenticate(forgedToken)); assertThat(exception.getMessage(), containsString("Failed to parse SAML")); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java index e9e8908c584..6230c637b89 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; @@ -62,7 +63,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { } } - public void testSettings() throws Exception { + public void testCacheSettings() throws Exception { String cachingHashAlgo = Hasher.values()[randomIntBetween(0, Hasher.values().length - 1)].name().toLowerCase(Locale.ROOT); int maxUsers = randomIntBetween(10, 100); TimeValue ttl = TimeValue.timeValueMinutes(randomIntBetween(10, 20)); @@ -468,7 +469,9 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { List threads = new ArrayList<>(numberOfThreads); for (int i = 0; i < numberOfThreads; i++) { final boolean invalidPassword = randomBoolean(); + final int threadNum = i; threads.add(new Thread(() -> { + threadPool.getThreadContext().putTransient("key", threadNum); try { latch.countDown(); latch.await(); @@ -476,6 +479,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { UsernamePasswordToken token = new UsernamePasswordToken(username, invalidPassword ? 
randomPassword : password); realm.authenticate(token, ActionListener.wrap((result) -> { + assertThat(threadPool.getThreadContext().getTransient("key"), is(threadNum)); if (invalidPassword && result.isAuthenticated()) { throw new RuntimeException("invalid password led to an authenticated user: " + result); } else if (invalidPassword == false && result.isAuthenticated() == false) { @@ -528,12 +532,15 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { final CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); List threads = new ArrayList<>(numberOfThreads); for (int i = 0; i < numberOfThreads; i++) { + final int threadNum = i; threads.add(new Thread(() -> { try { + threadPool.getThreadContext().putTransient("key", threadNum); latch.countDown(); latch.await(); for (int i1 = 0; i1 < numberOfIterations; i1++) { realm.lookupUser(username, ActionListener.wrap((user) -> { + assertThat(threadPool.getThreadContext().getTransient("key"), is(threadNum)); if (user == null) { throw new RuntimeException("failed to lookup user"); } @@ -560,6 +567,33 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { assertEquals(1, lookupCounter.get()); } + public void testAuthenticateDisabled() throws Exception { + final Settings settings = Settings.builder() + .put(CachingUsernamePasswordRealmSettings.AUTHC_ENABLED_SETTING.getKey(), false) + .build(); + final Environment env = TestEnvironment.newEnvironment(globalSettings); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final RealmConfig config = new RealmConfig("test_authentication_disabled", settings, globalSettings, env, threadContext); + final AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(config, threadPool); + + final UsernamePasswordToken token = new UsernamePasswordToken("phil", new SecureString("tahiti")); + UsernamePasswordToken.putTokenHeader(threadContext, token); + assertThat(realm.token(threadContext), nullValue()); + assertThat(realm.supports(token), equalTo(false)); + + PlainActionFuture authFuture = new PlainActionFuture<>(); + realm.authenticate(token, authFuture); + final AuthenticationResult authResult = authFuture.get(); + assertThat(authResult.isAuthenticated(), equalTo(false)); + assertThat(authResult.getStatus(), equalTo(AuthenticationResult.Status.CONTINUE)); + + PlainActionFuture lookupFuture = new PlainActionFuture<>(); + realm.lookupUser(token.principal(), lookupFuture); + final User user = lookupFuture.get(); + assertThat(user, notNullValue()); + assertThat(user.principal(), equalTo(token.principal())); + } + static class FailingAuthenticationRealm extends CachingUsernamePasswordRealm { FailingAuthenticationRealm(Settings settings, Settings global, ThreadPool threadPool) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java index 263c5ee4929..83110c7a10a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java @@ -200,27 +200,27 @@ public class DnRoleMapperTests extends ESTestCase { public void testParseFile() throws Exception { Path file = getDataPath("role_mapping.yml"); Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null); - Map> mappings = DnRoleMapper.parseFile(file, 
logger, "_type", "_name", false); + Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false); assertThat(mappings, notNullValue()); assertThat(mappings.size(), is(3)); DN dn = new DN("cn=avengers,ou=marvel,o=superheros"); - assertThat(mappings, hasKey(dn)); - Set roles = mappings.get(dn); + assertThat(mappings, hasKey(dn.toNormalizedString())); + List roles = mappings.get(dn.toNormalizedString()); assertThat(roles, notNullValue()); assertThat(roles, hasSize(2)); assertThat(roles, containsInAnyOrder("security", "avenger")); dn = new DN("cn=shield,ou=marvel,o=superheros"); - assertThat(mappings, hasKey(dn)); - roles = mappings.get(dn); + assertThat(mappings, hasKey(dn.toNormalizedString())); + roles = mappings.get(dn.toNormalizedString()); assertThat(roles, notNullValue()); assertThat(roles, hasSize(1)); assertThat(roles, contains("security")); dn = new DN("cn=Horatio Hornblower,ou=people,o=sevenSeas"); - assertThat(mappings, hasKey(dn)); - roles = mappings.get(dn); + assertThat(mappings, hasKey(dn.toNormalizedString())); + roles = mappings.get(dn.toNormalizedString()); assertThat(roles, notNullValue()); assertThat(roles, hasSize(1)); assertThat(roles, contains("avenger")); @@ -230,7 +230,7 @@ public class DnRoleMapperTests extends ESTestCase { Path file = createTempDir().resolve("foo.yaml"); Files.createFile(file); Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG, null); - Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false); + Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false); assertThat(mappings, notNullValue()); assertThat(mappings.isEmpty(), is(true)); List events = CapturingLogger.output(logger.getName(), Level.DEBUG); @@ -242,7 +242,7 @@ public class DnRoleMapperTests extends ESTestCase { public void testParseFile_WhenFileDoesNotExist() throws Exception { Path file = createTempDir().resolve(randomAlphaOfLength(10)); Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null); - Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false); + Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false); assertThat(mappings, notNullValue()); assertThat(mappings.isEmpty(), is(true)); @@ -272,7 +272,7 @@ public class DnRoleMapperTests extends ESTestCase { // writing in utf_16 should cause a parsing error as we try to read the file in utf_8 Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16); Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null); - Map> mappings = DnRoleMapper.parseFileLenient(file, logger, "_type", "_name"); + Map> mappings = DnRoleMapper.parseFileLenient(file, logger, "_type", "_name"); assertThat(mappings, notNullValue()); assertThat(mappings.isEmpty(), is(true)); List events = CapturingLogger.output(logger.getName(), Level.ERROR); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 8ccac83c86f..47cf458e19a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -240,22 +240,23 @@ public class AuthorizationServiceTests extends ESTestCase { future.actionGet(); } - public void testActionsSystemUserIsAuthorized() { - TransportRequest 
request = mock(TransportRequest.class); + public void testActionsForSystemUserIsAuthorized() { + final TransportRequest request = mock(TransportRequest.class); // A failure would throw an exception - Authentication authentication = createAuthentication(SystemUser.INSTANCE); - authorize(authentication, "indices:monitor/whatever", request); - verify(auditTrail).accessGranted(authentication, "indices:monitor/whatever", request, - new String[]{SystemUser.ROLE_NAME}); + final Authentication authentication = createAuthentication(SystemUser.INSTANCE); + final String[] actions = { "indices:monitor/whatever", "internal:whatever", "cluster:monitor/whatever", "cluster:admin/reroute", + "indices:admin/mapping/put", "indices:admin/template/put", "indices:admin/seq_no/global_checkpoint_sync", + "indices:admin/settings/update" }; + for (String action : actions) { + authorize(authentication, action, request); + verify(auditTrail).accessGranted(authentication, action, request, new String[] { SystemUser.ROLE_NAME }); + } - authentication = createAuthentication(SystemUser.INSTANCE); - authorize(authentication, "internal:whatever", request); - verify(auditTrail).accessGranted(authentication, "internal:whatever", request, new String[]{SystemUser.ROLE_NAME}); verifyNoMoreInteractions(auditTrail); } - public void testIndicesActionsAreNotAuthorized() { + public void testIndicesActionsForSystemUserWhichAreNotAuthorized() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = createAuthentication(SystemUser.INSTANCE); assertThrowsAuthorizationException( @@ -265,25 +266,23 @@ public class AuthorizationServiceTests extends ESTestCase { verifyNoMoreInteractions(auditTrail); } - public void testClusterAdminActionsAreNotAuthorized() { + public void testClusterAdminActionsForSystemUserWhichAreNotAuthorized() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = createAuthentication(SystemUser.INSTANCE); assertThrowsAuthorizationException( () -> authorize(authentication, "cluster:admin/whatever", request), "cluster:admin/whatever", SystemUser.INSTANCE.principal()); - verify(auditTrail).accessDenied(authentication, "cluster:admin/whatever", request, - new String[]{SystemUser.ROLE_NAME}); + verify(auditTrail).accessDenied(authentication, "cluster:admin/whatever", request, new String[] { SystemUser.ROLE_NAME }); verifyNoMoreInteractions(auditTrail); } - public void testClusterAdminSnapshotStatusActionIsNotAuthorized() { + public void testClusterAdminSnapshotStatusActionForSystemUserWhichIsNotAuthorized() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = createAuthentication(SystemUser.INSTANCE); assertThrowsAuthorizationException( () -> authorize(authentication, "cluster:admin/snapshot/status", request), "cluster:admin/snapshot/status", SystemUser.INSTANCE.principal()); - verify(auditTrail).accessDenied(authentication, "cluster:admin/snapshot/status", request, - new String[]{SystemUser.ROLE_NAME}); + verify(auditTrail).accessDenied(authentication, "cluster:admin/snapshot/status", request, new String[] { SystemUser.ROLE_NAME }); verifyNoMoreInteractions(auditTrail); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 39b70e0a879..f9d91527942 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -119,8 +119,10 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { final boolean withAlias = randomBoolean(); final String securityIndexName = SECURITY_INDEX_NAME + (withAlias ? "-" + randomAlphaOfLength(5) : ""); MetaData metaData = MetaData.builder() - .put(indexBuilder("foo").putAlias(AliasMetaData.builder("foofoobar")).settings(settings)) - .put(indexBuilder("foobar").putAlias(AliasMetaData.builder("foofoobar")).settings(settings)) + .put(indexBuilder("foo").putAlias(AliasMetaData.builder("foofoobar")) + .putAlias(AliasMetaData.builder("foounauthorized")).settings(settings)) + .put(indexBuilder("foobar").putAlias(AliasMetaData.builder("foofoobar")) + .putAlias(AliasMetaData.builder("foobarfoo")).settings(settings)) .put(indexBuilder("closed").state(IndexMetaData.State.CLOSE) .putAlias(AliasMetaData.builder("foofoobar")).settings(settings)) .put(indexBuilder("foofoo-closed").state(IndexMetaData.State.CLOSE).settings(settings)) @@ -145,7 +147,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { userDashIndices = new User("dash", "dash"); userNoIndices = new User("test", "test"); rolesStore = mock(CompositeRolesStore.class); - String[] authorizedIndices = new String[] { "bar", "bar-closed", "foofoobar", "foofoo", "missing", "foofoo-closed"}; + String[] authorizedIndices = new String[] { "bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "missing", "foofoo-closed"}; String[] dashIndices = new String[]{"-index10", "-index11", "-index20", "-index21"}; roleMap = new HashMap<>(); roleMap.put("role", new RoleDescriptor("role", null, @@ -313,7 +315,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { SearchRequest request = new SearchRequest(); request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), true, true)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foofoo", "foofoo-closed"}; + String[] replacedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed"}; assertThat(indices.size(), equalTo(replacedIndices.length)); assertThat(request.indices().length, equalTo(replacedIndices.length)); assertThat(indices, hasItems(replacedIndices)); @@ -324,10 +326,8 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { SearchRequest request = new SearchRequest(); request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), true, false)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar", "foofoobar", "foofoo"}; - assertThat(indices.size(), equalTo(replacedIndices.length)); - assertThat(request.indices().length, equalTo(replacedIndices.length)); - assertThat(indices, hasItems(replacedIndices)); + String[] replacedIndices = new String[]{"bar", "foofoobar", "foobarfoo", "foofoo"}; + assertSameValues(indices, replacedIndices); assertThat(request.indices(), arrayContainingInAnyOrder(replacedIndices)); } @@ -335,7 +335,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { SearchRequest request = new SearchRequest("_all"); 
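// A note on the option flags used throughout these resolver tests (parameter names taken from the
// 6.x sources of IndicesOptions#fromOptions): the four booleans are, in order, ignoreUnavailable,
// allowNoIndices, expandWildcardsOpen and expandWildcardsClosed. That is why only the variants that
// pass true for the last flag expect the closed indices ("bar-closed", "foofoo-closed") to be resolved.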
request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), true, true)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foofoo", "foofoo-closed"}; + String[] replacedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed"}; assertThat(indices.size(), equalTo(replacedIndices.length)); assertThat(request.indices().length, equalTo(replacedIndices.length)); assertThat(indices, hasItems(replacedIndices)); @@ -346,7 +346,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { SearchRequest request = new SearchRequest("_all"); request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), true, false)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar", "foofoobar", "foofoo"}; + String[] replacedIndices = new String[]{"bar", "foofoobar", "foobarfoo", "foofoo"}; assertThat(indices.size(), equalTo(replacedIndices.length)); assertThat(request.indices().length, equalTo(replacedIndices.length)); assertThat(indices, hasItems(replacedIndices)); @@ -401,7 +401,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { SearchRequest request = new SearchRequest("*", "-foofoo*"); request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), true, false)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar"}; + String[] replacedIndices = new String[]{"bar", "foobarfoo"}; assertThat(indices.size(), equalTo(replacedIndices.length)); assertThat(request.indices().length, equalTo(replacedIndices.length)); assertThat(indices, hasItems(replacedIndices)); @@ -412,7 +412,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { SearchRequest request = new SearchRequest("*", "-foofoo*"); request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), true, true)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar", "bar-closed"}; + String[] replacedIndices = new String[]{"bar", "foobarfoo", "bar-closed"}; assertThat(indices.size(), equalTo(replacedIndices.length)); assertThat(request.indices().length, equalTo(replacedIndices.length)); assertThat(indices, hasItems(replacedIndices)); @@ -423,18 +423,16 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { SearchRequest request = new SearchRequest("*", "-foofoo*", "barbaz", "foob*"); request.indicesOptions(IndicesOptions.fromOptions(false, true, true, false)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar", "barbaz"}; - assertThat(indices.size(), equalTo(replacedIndices.length)); - assertThat(request.indices().length, equalTo(replacedIndices.length)); - assertThat(indices, hasItems(replacedIndices)); - assertThat(request.indices(), arrayContainingInAnyOrder(replacedIndices)); + String[] replacedIndices = new String[]{"bar", "foobarfoo", "barbaz"}; + assertSameValues(indices, replacedIndices); + assertThat(request.indices(), arrayContainingInAnyOrder("bar", "foobarfoo", "barbaz", "foobarfoo")); } public void testResolveWildcardsPlusAndMinusExpandWilcardsOpenIgnoreUnavailable() { 
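// In index expressions a leading '-' excludes a name while a leading '+' explicitly includes one;
// inclusion is already the default, so the '+' prefix is redundant (it was deprecated in later
// versions) and this test only pins down the legacy mixed '+'/'-' behaviour.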
SearchRequest request = new SearchRequest("*", "-foofoo*", "+barbaz", "+foob*"); request.indicesOptions(IndicesOptions.fromOptions(true, true, true, false)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar"}; + String[] replacedIndices = new String[]{"bar", "foobarfoo"}; assertThat(indices.size(), equalTo(replacedIndices.length)); assertThat(request.indices().length, equalTo(replacedIndices.length)); assertThat(indices, hasItems(replacedIndices)); @@ -445,10 +443,8 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { SearchRequest request = new SearchRequest("*", "-foofoo*", "barbaz"); request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), true, true)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar", "bar-closed", "barbaz"}; - assertThat(indices.size(), equalTo(replacedIndices.length)); - assertThat(request.indices().length, equalTo(replacedIndices.length)); - assertThat(indices, hasItems(replacedIndices)); + String[] replacedIndices = new String[]{"bar", "bar-closed", "barbaz", "foobarfoo"}; + assertSameValues(indices, replacedIndices); assertThat(request.indices(), arrayContainingInAnyOrder(replacedIndices)); } @@ -456,9 +452,8 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { SearchRequest request = new SearchRequest("*", "-foofoo*", "barbaz"); request.indicesOptions(IndicesOptions.fromOptions(true, randomBoolean(), true, true)); List indices = resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME)).getLocal(); - String[] replacedIndices = new String[]{"bar", "bar-closed"}; + String[] replacedIndices = new String[]{"bar", "bar-closed", "foobarfoo"}; assertThat(indices.size(), equalTo(replacedIndices.length)); - assertThat(request.indices().length, equalTo(replacedIndices.length)); assertThat(indices, hasItems(replacedIndices)); assertThat(request.indices(), arrayContainingInAnyOrder(replacedIndices)); } @@ -753,13 +748,13 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { //union of all resolved indices and aliases gets returned, based on what user is authorized for //note that the index side will end up containing matching aliases too, which is fine, as es core would do //the same and resolve those aliases to their corresponding concrete indices (which we let core do) - String[] expectedIndices = new String[]{"bar", "foofoobar", "foofoo"}; + String[] expectedIndices = new String[]{"bar", "foofoobar", "foobarfoo", "foofoo"}; assertSameValues(indices, expectedIndices); //alias foofoobar on both sides, that's fine, es core would do the same, same as above assertThat(request.getAliasActions().get(0).indices(), arrayContainingInAnyOrder("bar", "foofoo")); - assertThat(request.getAliasActions().get(0).aliases(), arrayContainingInAnyOrder("foofoobar")); + assertThat(request.getAliasActions().get(0).aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo")); assertThat(request.getAliasActions().get(1).indices(), arrayContainingInAnyOrder("bar")); - assertThat(request.getAliasActions().get(1).aliases(), arrayContainingInAnyOrder("foofoobar")); + assertThat(request.getAliasActions().get(1).aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo")); } public void testResolveAllAliasesWildcardsIndicesAliasesRequestDeleteActions() { @@ -771,13 +766,13 @@ public class IndicesAndAliasesResolverTests extends 
ESTestCase { //union of all resolved indices and aliases gets returned, based on what user is authorized for //note that the index side will end up containing matching aliases too, which is fine, as es core would do //the same and resolve those aliases to their corresponding concrete indices (which we let core do) - String[] expectedIndices = new String[]{"bar", "foofoobar", "foofoo", "explicit"}; + String[] expectedIndices = new String[]{"bar", "foofoobar", "foobarfoo", "foofoo", "explicit"}; assertSameValues(indices, expectedIndices); //alias foofoobar on both sides, that's fine, es core would do the same, same as above assertThat(request.getAliasActions().get(0).indices(), arrayContainingInAnyOrder("bar", "foofoo")); - assertThat(request.getAliasActions().get(0).aliases(), arrayContainingInAnyOrder("foofoobar")); + assertThat(request.getAliasActions().get(0).aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo")); assertThat(request.getAliasActions().get(0).indices(), arrayContainingInAnyOrder("bar", "foofoo")); - assertThat(request.getAliasActions().get(1).aliases(), arrayContainingInAnyOrder("foofoobar", "explicit")); + assertThat(request.getAliasActions().get(1).aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo", "explicit")); } public void testResolveAliasesWildcardsIndicesAliasesRequestDeleteActionsNoAuthorizedIndices() { @@ -786,7 +781,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { //no authorized aliases match bar*, hence aliases are replaced with empty string for that action request.addAliasAction(AliasActions.remove().index("*bar").alias("bar*")); resolveIndices(request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME)); - assertThat(request.getAliasActions().get(0).aliases().length, equalTo(1)); + assertThat(request.getAliasActions().get(0).aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo")); assertThat(request.getAliasActions().get(1).aliases().length, equalTo(0)); } @@ -886,11 +881,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned, based on indices and aliases that user is authorized for - String[] expectedIndices = new String[]{"alias1", "foofoo", "foofoo-closed", "foofoobar"}; + String[] expectedIndices = new String[]{"alias1", "foofoo", "foofoo-closed", "foofoobar", "foobarfoo"}; assertThat(indices.size(), equalTo(expectedIndices.length)); assertThat(indices, hasItems(expectedIndices)); //wildcards get replaced on each single action - assertThat(request.indices(), arrayContainingInAnyOrder("foofoobar", "foofoo", "foofoo-closed")); + assertThat(request.indices(), arrayContainingInAnyOrder("foofoobar", "foobarfoo", "foofoo", "foofoo-closed")); assertThat(request.aliases(), arrayContainingInAnyOrder("alias1")); } @@ -902,11 +897,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned, based on indices and aliases that user is authorized for - String[] expectedIndices = new String[]{"alias1", "foofoo", "foofoobar"}; + String[] expectedIndices = new String[]{"alias1", "foofoo", "foofoobar", "foobarfoo"}; assertThat(indices.size(), 
equalTo(expectedIndices.length)); assertThat(indices, hasItems(expectedIndices)); //wildcards get replaced on each single action - assertThat(request.indices(), arrayContainingInAnyOrder("foofoobar", "foofoo")); + assertThat(request.indices(), arrayContainingInAnyOrder("foofoobar", "foobarfoo", "foofoo")); assertThat(request.aliases(), arrayContainingInAnyOrder("alias1")); } @@ -918,11 +913,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned, based on indices and aliases that user is authorized for - String[] expectedIndices = new String[]{"alias1", "foofoo", "foofoobar", "bar"}; + String[] expectedIndices = new String[]{"alias1", "foofoo", "foofoobar", "foobarfoo", "bar"}; assertThat(indices.size(), equalTo(expectedIndices.length)); assertThat(indices, hasItems(expectedIndices)); //wildcards get replaced on each single action - assertThat(request.indices(), arrayContainingInAnyOrder("foofoobar", "foofoo", "bar")); + assertThat(request.indices(), arrayContainingInAnyOrder("foofoobar", "foobarfoo", "foofoo", "bar")); assertThat(request.aliases(), arrayContainingInAnyOrder("alias1")); } @@ -954,10 +949,10 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned - String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foofoo", "foofoo-closed", "alias1"}; + String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed", "alias1"}; assertThat(indices.size(), equalTo(expectedIndices.length)); assertThat(indices, hasItems(expectedIndices)); - String[] replacedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foofoo", "foofoo-closed"}; + String[] replacedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed"}; //_all gets replaced with all indices that user is authorized for assertThat(request.indices(), arrayContainingInAnyOrder(replacedIndices)); assertThat(request.aliases(), arrayContainingInAnyOrder("alias1")); @@ -975,10 +970,10 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned - String[] expectedIndices = new String[]{"bar", "foofoobar", "foofoo", "alias1"}; + String[] expectedIndices = new String[]{"bar", "foofoobar", "foobarfoo", "foofoo", "alias1"}; assertThat(indices.size(), equalTo(expectedIndices.length)); assertThat(indices, hasItems(expectedIndices)); - String[] replacedIndices = new String[]{"bar", "foofoobar", "foofoo"}; + String[] replacedIndices = new String[]{"bar", "foofoobar", "foobarfoo", "foofoo"}; //_all gets replaced with all indices that user is authorized for assertThat(request.indices(), arrayContainingInAnyOrder(replacedIndices)); assertThat(request.aliases(), arrayContainingInAnyOrder("alias1")); @@ -1034,11 +1029,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { final AuthorizedIndices authorizedIndices = 
buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned - String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foofoo", "foofoo-closed"}; + String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed"}; assertSameValues(indices, expectedIndices); //_all gets replaced with all indices that user is authorized for assertThat(request.indices(), arrayContainingInAnyOrder(expectedIndices)); - assertThat(request.aliases(), arrayContainingInAnyOrder("foofoobar")); + assertThat(request.aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo")); } public void testResolveAllAndExplicitAliasesGetAliasesRequest() { @@ -1049,11 +1044,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned - String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foofoo", "foofoo-closed", "explicit"}; + String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed", "explicit"}; assertSameValues(indices, expectedIndices); //_all gets replaced with all indices that user is authorized for - assertThat(request.indices(), arrayContainingInAnyOrder("bar", "bar-closed", "foofoobar", "foofoo", "foofoo-closed")); - assertThat(request.aliases(), arrayContainingInAnyOrder("foofoobar", "explicit")); + assertThat(request.indices(), arrayContainingInAnyOrder("bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed")); + assertThat(request.aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo", "explicit")); } public void testResolveAllAndWildcardsAliasesGetAliasesRequest() { @@ -1064,11 +1059,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned - String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foofoo", "foofoo-closed"}; + String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed"}; assertSameValues(indices, expectedIndices); //_all gets replaced with all indices that user is authorized for assertThat(request.indices(), arrayContainingInAnyOrder(expectedIndices)); - assertThat(request.aliases(), arrayContainingInAnyOrder("foofoobar", "foofoobar")); + assertThat(request.aliases(), arrayContainingInAnyOrder("foofoobar", "foofoobar", "foobarfoo", "foobarfoo")); } public void testResolveAliasesWildcardsGetAliasesRequest() { @@ -1080,11 +1075,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { //union of all resolved indices and aliases gets returned, based on what user is authorized for //note that the index side will end up containing matching aliases too, which is fine, as es core would do //the same and resolve those aliases to their corresponding concrete indices (which we let core do) - String[] expectedIndices = new String[]{"bar", "foofoobar"}; + String[] expectedIndices = new String[]{"bar", "foobarfoo", "foofoobar"}; assertSameValues(indices, 
expectedIndices); //alias foofoobar on both sides, that's fine, es core would do the same, same as above assertThat(request.indices(), arrayContainingInAnyOrder("bar", "foofoobar")); - assertThat(request.aliases(), arrayContainingInAnyOrder("foofoobar")); + assertThat(request.aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo")); } public void testResolveAliasesWildcardsGetAliasesRequestNoAuthorizedIndices() { @@ -1096,6 +1091,21 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { assertThat(request.aliases().length, equalTo(0)); } + public void testResolveAliasesExclusionWildcardsGetAliasesRequest() { + GetAliasesRequest request = new GetAliasesRequest(); + request.aliases("foo*","-foobar*"); + final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + List indices = resolveIndices(request, authorizedIndices).getLocal(); + //union of all resolved indices and aliases gets returned, based on what user is authorized for + //note that the index side will end up containing matching aliases too, which is fine, as es core would do + //the same and resolve those aliases to their corresponding concrete indices (which we let core do) + String[] expectedIndices = new String[]{"bar", "bar-closed", "foobarfoo", "foofoo", "foofoo-closed", "foofoobar"}; + assertSameValues(indices, expectedIndices); + //alias foofoobar on both sides, that's fine, es core would do the same, same as above + assertThat(request.indices(), arrayContainingInAnyOrder("bar", "bar-closed", "foobarfoo", "foofoo", "foofoo-closed", "foofoobar")); + assertThat(request.aliases(), arrayContainingInAnyOrder("foofoobar")); + } + public void testResolveAliasesAllGetAliasesRequestNoAuthorizedIndices() { GetAliasesRequest request = new GetAliasesRequest(); if (randomBoolean()) { @@ -1164,7 +1174,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { { RefreshRequest request = new RefreshRequest("*"); List indices = resolveIndices(request, authorizedIndices).getLocal(); - String[] expectedIndices = new String[]{"bar", "foofoobar", "foofoo"}; + String[] expectedIndices = new String[]{"bar", "foofoobar", "foobarfoo", "foofoo"}; assertThat(indices.size(), equalTo(expectedIndices.length)); assertThat(indices, hasItems(expectedIndices)); assertThat(request.indices(), arrayContainingInAnyOrder(expectedIndices)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringUpdateTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringUpdateTests.java index 9a960eae2d6..65a5fb080cd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringUpdateTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringUpdateTests.java @@ -121,7 +121,7 @@ public class IpFilteringUpdateTests extends SecurityIntegTestCase { } } - // issue #762, occured because in the above test we use HTTP and transport + // issue #762, occurred because in the above test we use HTTP and transport public void testThatDisablingIpFilterWorksAsExpected() throws Exception { Settings settings = Settings.builder() .put("xpack.security.transport.filter.deny", "127.0.0.8") diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml index 
11657750c51..1bef00f7371 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml @@ -23,7 +23,7 @@ role3: cluster: ALL indices: '*': ALL -# invalid role indices privilegs +# invalid role indices privileges role4: cluster: ALL indices: diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java index ccba7429a4b..831b3177f56 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java @@ -43,7 +43,7 @@ import javax.sql.DataSource; * This class tries to cater to both audiences - use the legacy, Writer way if needed though strive to use the * proper typical approach, that of specifying intention and output (file) in the URL. * - * For this reason the {@link System#out} and {@link System#err} are being refered in this class though are used only + * For this reason the {@link System#out} and {@link System#err} are being referred in this class though are used only * when needed. */ public final class Debug { diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java index c06a96c9881..970a188df7c 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java @@ -202,8 +202,7 @@ class JdbcDatabaseMetaData implements DatabaseMetaData, JdbcWrapper { + "CHAR,CHAR_LENGTH,CHARACTER_LENGTH,CONCAT," + "INSERT," + "LCASE,LEFT,LENGTH,LOCATE,LTRIM," - // waiting on https://github.com/elastic/elasticsearch/issues/33477 - //+ "OCTET_LENGTH," + + "OCTET_LENGTH," + "POSITION," + "REPEAT,REPLACE,RIGHT,RTRIM," + "SPACE,SUBSTRING," diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java index ec76b6ab34a..d513ca07df4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java @@ -23,7 +23,7 @@ public class FunctionDefinition { private final List aliases; private final Class clazz; /** - * Is this a datetime function comaptible with {@code EXTRACT}. + * Is this a datetime function compatible with {@code EXTRACT}. */ private final boolean datetime; private final Builder builder; @@ -60,7 +60,7 @@ public class FunctionDefinition { } /** - * Is this a datetime function comaptible with {@code EXTRACT}. + * Is this a datetime function compatible with {@code EXTRACT}. 
*/ boolean datetime() { return datetime; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index caafd8294c6..4da4cf4d023 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -72,6 +72,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Length; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Locate; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.OctetLength; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Position; import org.elasticsearch.xpack.sql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Repeat; @@ -92,124 +93,143 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Map.Entry; import java.util.TimeZone; import java.util.function.BiFunction; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static java.util.Collections.emptyList; import static java.util.Collections.unmodifiableList; import static java.util.stream.Collectors.toList; public class FunctionRegistry { - private static final List DEFAULT_FUNCTIONS = unmodifiableList(Arrays.asList( - // Aggregate functions - def(Avg.class, Avg::new), - def(Count.class, Count::new), - def(Max.class, Max::new), - def(Min.class, Min::new), - def(Sum.class, Sum::new), - // Statistics - def(StddevPop.class, StddevPop::new), - def(VarPop.class, VarPop::new), - def(Percentile.class, Percentile::new), - def(PercentileRank.class, PercentileRank::new), - def(SumOfSquares.class, SumOfSquares::new), - def(Skewness.class, Skewness::new), - def(Kurtosis.class, Kurtosis::new), - // Scalar functions - // Date - def(DayName.class, DayName::new, "DAYNAME"), - def(DayOfMonth.class, DayOfMonth::new, "DAYOFMONTH", "DAY", "DOM"), - def(DayOfWeek.class, DayOfWeek::new, "DAYOFWEEK", "DOW"), - def(DayOfYear.class, DayOfYear::new, "DAYOFYEAR", "DOY"), - def(HourOfDay.class, HourOfDay::new, "HOUR"), - def(MinuteOfDay.class, MinuteOfDay::new), - def(MinuteOfHour.class, MinuteOfHour::new, "MINUTE"), - def(MonthName.class, MonthName::new, "MONTHNAME"), - def(MonthOfYear.class, MonthOfYear::new, "MONTH"), - def(SecondOfMinute.class, SecondOfMinute::new, "SECOND"), - def(Quarter.class, Quarter::new), - def(Year.class, Year::new), - def(WeekOfYear.class, WeekOfYear::new, "WEEK"), - // Math - def(Abs.class, Abs::new), - def(ACos.class, ACos::new), - def(ASin.class, ASin::new), - def(ATan.class, ATan::new), - def(ATan2.class, ATan2::new), - def(Cbrt.class, Cbrt::new), - def(Ceil.class, Ceil::new, "CEILING"), - def(Cos.class, Cos::new), - def(Cosh.class, Cosh::new), - def(Cot.class, Cot::new), - def(Degrees.class, Degrees::new), - def(E.class, E::new), - def(Exp.class, Exp::new), - def(Expm1.class, Expm1::new), - def(Floor.class, Floor::new), - def(Log.class, Log::new), - def(Log10.class, Log10::new), - // SQL and ODBC require MOD as a _function_ - def(Mod.class, Mod::new), - def(Pi.class, Pi::new), - def(Power.class, Power::new), - 
def(Radians.class, Radians::new), - def(Random.class, Random::new, "RAND"), - def(Round.class, Round::new), - def(Sign.class, Sign::new, "SIGNUM"), - def(Sin.class, Sin::new), - def(Sinh.class, Sinh::new), - def(Sqrt.class, Sqrt::new), - def(Tan.class, Tan::new), - def(Truncate.class, Truncate::new), - // String - def(Ascii.class, Ascii::new), - def(BitLength.class, BitLength::new), - def(Char.class, Char::new), - def(CharLength.class, CharLength::new, "CHARACTER_LENGTH"), - def(Concat.class, Concat::new), - def(Insert.class, Insert::new), - def(LCase.class, LCase::new), - def(Left.class, Left::new), - def(Length.class, Length::new), - def(Locate.class, Locate::new), - def(LTrim.class, LTrim::new), - def(Position.class, Position::new), - def(Repeat.class, Repeat::new), - def(Replace.class, Replace::new), - def(Right.class, Right::new), - def(RTrim.class, RTrim::new), - def(Space.class, Space::new), - def(Substring.class, Substring::new), - def(UCase.class, UCase::new), - // Special - def(Score.class, Score::new))); - + // list of functions grouped by type of functions (aggregate, statistics, math etc) and ordered alphabetically inside each group + // a single function will have one entry for itself with its name associated to its instance and, also, one entry for each alias + // it has with the alias name associated to the FunctionDefinition instance private final Map<String, FunctionDefinition> defs = new LinkedHashMap<>(); - private final Map<String, String> aliases; + private final Map<String, String> aliases = new HashMap<>(); /** * Constructor to build with the default list of functions. */ public FunctionRegistry() { - this(DEFAULT_FUNCTIONS); + defineDefaultFunctions(); } - + /** * Constructor specifying alternate functions for testing. */ - FunctionRegistry(List<FunctionDefinition> functions) { - this.aliases = new HashMap<>(); + FunctionRegistry(FunctionDefinition...
functions) { + addToMap(functions); + } + + private void defineDefaultFunctions() { + // Aggregate functions + addToMap(def(Avg.class, Avg::new), + def(Count.class, Count::new), + def(Max.class, Max::new), + def(Min.class, Min::new), + def(Sum.class, Sum::new)); + // Statistics + addToMap(def(StddevPop.class, StddevPop::new), + def(VarPop.class, VarPop::new), + def(Percentile.class, Percentile::new), + def(PercentileRank.class, PercentileRank::new), + def(SumOfSquares.class, SumOfSquares::new), + def(Skewness.class, Skewness::new), + def(Kurtosis.class, Kurtosis::new)); + // Scalar functions + // Date + addToMap(def(DayName.class, DayName::new, "DAYNAME"), + def(DayOfMonth.class, DayOfMonth::new, "DAYOFMONTH", "DAY", "DOM"), + def(DayOfWeek.class, DayOfWeek::new, "DAYOFWEEK", "DOW"), + def(DayOfYear.class, DayOfYear::new, "DAYOFYEAR", "DOY"), + def(HourOfDay.class, HourOfDay::new, "HOUR"), + def(MinuteOfDay.class, MinuteOfDay::new), + def(MinuteOfHour.class, MinuteOfHour::new, "MINUTE"), + def(MonthName.class, MonthName::new, "MONTHNAME"), + def(MonthOfYear.class, MonthOfYear::new, "MONTH"), + def(SecondOfMinute.class, SecondOfMinute::new, "SECOND"), + def(Quarter.class, Quarter::new), + def(Year.class, Year::new), + def(WeekOfYear.class, WeekOfYear::new, "WEEK")); + // Math + addToMap(def(Abs.class, Abs::new), + def(ACos.class, ACos::new), + def(ASin.class, ASin::new), + def(ATan.class, ATan::new), + def(ATan2.class, ATan2::new), + def(Cbrt.class, Cbrt::new), + def(Ceil.class, Ceil::new, "CEILING"), + def(Cos.class, Cos::new), + def(Cosh.class, Cosh::new), + def(Cot.class, Cot::new), + def(Degrees.class, Degrees::new), + def(E.class, E::new), + def(Exp.class, Exp::new), + def(Expm1.class, Expm1::new), + def(Floor.class, Floor::new), + def(Log.class, Log::new), + def(Log10.class, Log10::new), + // SQL and ODBC require MOD as a _function_ + def(Mod.class, Mod::new), + def(Pi.class, Pi::new), + def(Power.class, Power::new), + def(Radians.class, Radians::new), + def(Random.class, Random::new, "RAND"), + def(Round.class, Round::new), + def(Sign.class, Sign::new, "SIGNUM"), + def(Sin.class, Sin::new), + def(Sinh.class, Sinh::new), + def(Sqrt.class, Sqrt::new), + def(Tan.class, Tan::new), + def(Truncate.class, Truncate::new)); + // String + addToMap(def(Ascii.class, Ascii::new), + def(BitLength.class, BitLength::new), + def(Char.class, Char::new), + def(CharLength.class, CharLength::new, "CHARACTER_LENGTH"), + def(Concat.class, Concat::new), + def(Insert.class, Insert::new), + def(LCase.class, LCase::new), + def(Left.class, Left::new), + def(Length.class, Length::new), + def(Locate.class, Locate::new), + def(LTrim.class, LTrim::new), + def(OctetLength.class, OctetLength::new), + def(Position.class, Position::new), + def(Repeat.class, Repeat::new), + def(Replace.class, Replace::new), + def(Right.class, Right::new), + def(RTrim.class, RTrim::new), + def(Space.class, Space::new), + def(Substring.class, Substring::new), + def(UCase.class, UCase::new)); + // Special + addToMap(def(Score.class, Score::new)); + }
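As the new comments spell out, the rewritten registry keeps one defs entry per primary name and one per alias, rejects an alias that collides with anything already registered, and sorts each batch alphabetically before appending it to the insertion-ordered map; the addToMap implementation follows. A standalone sketch of that registration scheme (plain strings stand in for the SQL FunctionDefinition classes, so the names and types here are illustrative):

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

final class MiniRegistry {
    private final Map<String, String> defs = new LinkedHashMap<>(); // name or alias -> primary name
    private final Map<String, String> aliases = new HashMap<>();    // alias -> primary name

    // batch maps each primary name to its aliases
    void addBatch(Map<String, String[]> batch) {
        Map<String, String> batchMap = new TreeMap<>();             // TreeMap keeps the batch sorted by key
        batch.forEach((name, as) -> {
            batchMap.put(name, name);
            for (String alias : as) {
                String old = batchMap.put(alias, name);
                if (old != null || defs.containsKey(alias)) {
                    throw new IllegalArgumentException("alias [" + alias + "] is already in use");
                }
                aliases.put(alias, name);
            }
        });
        defs.putAll(batchMap);                                      // appended in sorted order, after earlier batches
    }
}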
+ + protected void addToMap(FunctionDefinition... functions) { + // temporary map to hold [function_name/alias_name : function instance] + Map<String, FunctionDefinition> batchMap = new HashMap<>(); for (FunctionDefinition f : functions) { - defs.put(f.name(), f); + batchMap.put(f.name(), f); for (String alias : f.aliases()) { - Object old = aliases.put(alias, f.name()); - if (old != null) { - throw new IllegalArgumentException("alias [" + alias + "] is used by [" + old + "] and [" + f.name() + "]"); + Object old = batchMap.put(alias, f); + if (old != null || defs.containsKey(alias)) { + throw new IllegalArgumentException("alias [" + alias + "] is used by " + + "[" + (old != null ? old : defs.get(alias).name()) + "] and [" + f.name() + "]"); } - defs.put(alias, f); + aliases.put(alias, f.name()); } } + // sort the temporary map by key name and add it to the global map of functions + defs.putAll(batchMap.entrySet().stream() + .sorted(Map.Entry.comparingByKey()) + .collect(Collectors.<Entry<String, FunctionDefinition>, String, + FunctionDefinition, LinkedHashMap<String, FunctionDefinition>>toMap(Map.Entry::getKey, Map.Entry::getValue, + (oldValue, newValue) -> oldValue, LinkedHashMap::new))); } public FunctionDefinition resolveFunction(String functionName) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java index 3254e0538f0..0cd6268c838 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; /** - * Returns returns the number of bits contained within the value expression. + * Returns the number of bits contained within the value expression. */ public class BitLength extends UnaryStringFunction { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/OctetLength.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/OctetLength.java new file mode 100644 index 00000000000..4153769f50c --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/OctetLength.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * Returns the number of bytes contained within the value expression.
+ */ +public class OctetLength extends UnaryStringFunction { + + public OctetLength(Location location, Expression field) { + super(location, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, OctetLength::new, field()); + } + + @Override + protected OctetLength replaceChild(Expression newChild) { + return new OctetLength(location(), newChild); + } + + @Override + protected StringOperation operation() { + return StringOperation.OCTET_LENGTH; + } + + @Override + public DataType dataType() { + return DataType.INTEGER; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java index 1f38456cba1..33eb61012ea 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java @@ -15,7 +15,7 @@ abstract class StringFunctionUtils { * * @param s the original String * @param start starting position for the substring within the original string. 0-based index position - * @param length length in characters of the substracted substring + * @param length length in characters of the subtracted substring * @return the resulting String */ static String substring(String s, int start, int length) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java index 56ef820a4d3..bbeeb417c59 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java @@ -65,6 +65,7 @@ public class StringProcessor implements Processor { return new String(spaces); }), BIT_LENGTH((String s) -> UnicodeUtil.calcUTF16toUTF8Length(s, 0, s.length()) * 8), + OCTET_LENGTH((String s) -> UnicodeUtil.calcUTF16toUTF8Length(s, 0, s.length())), CHAR_LENGTH(String::length); private final Function apply; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index 782b2cc66cb..ec52d64cfbf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -43,6 +43,14 @@ public final class InternalSqlScriptUtils { return QuarterProcessor.quarter(millis, tzId); } + public static Number round(Number v, Number s) { + return BinaryMathOperation.ROUND.apply(v, s); + } + + public static Number truncate(Number v, Number s) { + return BinaryMathOperation.TRUNCATE.apply(v, s); + } + public static Integer ascii(String s) { return (Integer) StringOperation.ASCII.apply(s); } @@ -59,75 +67,71 @@ public final class InternalSqlScriptUtils { return (Integer) StringOperation.CHAR_LENGTH.apply(s); } - public static String lcase(String s) { - 
return (String) StringOperation.LCASE.apply(s); - } - - public static String ucase(String s) { - return (String) StringOperation.UCASE.apply(s); - } - - public static Integer length(String s) { - return (Integer) StringOperation.LENGTH.apply(s); - } - - public static String rtrim(String s) { - return (String) StringOperation.RTRIM.apply(s); - } - - public static String ltrim(String s) { - return (String) StringOperation.LTRIM.apply(s); - } - - public static String space(Number n) { - return (String) StringOperation.SPACE.apply(n); - } - - public static String left(String s, int count) { - return BinaryStringNumericOperation.LEFT.apply(s, count); - } - - public static String right(String s, int count) { - return BinaryStringNumericOperation.RIGHT.apply(s, count); - } - public static String concat(String s1, String s2) { return ConcatFunctionProcessor.doProcessInScripts(s1, s2).toString(); } - public static String repeat(String s, int count) { - return BinaryStringNumericOperation.REPEAT.apply(s, count); - } - - public static Integer position(String s1, String s2) { - return (Integer) BinaryStringStringOperation.POSITION.apply(s1, s2); - } - public static String insert(String s, int start, int length, String r) { return InsertFunctionProcessor.doProcess(s, start, length, r).toString(); } - public static String substring(String s, int start, int length) { - return SubstringFunctionProcessor.doProcess(s, start, length).toString(); + public static String lcase(String s) { + return (String) StringOperation.LCASE.apply(s); } - public static String replace(String s1, String s2, String s3) { - return ReplaceFunctionProcessor.doProcess(s1, s2, s3).toString(); + public static String left(String s, int count) { + return BinaryStringNumericOperation.LEFT.apply(s, count); } - public static Integer locate(String s1, String s2, Integer pos) { - return (Integer) LocateFunctionProcessor.doProcess(s1, s2, pos); + public static Integer length(String s) { + return (Integer) StringOperation.LENGTH.apply(s); } public static Integer locate(String s1, String s2) { return locate(s1, s2, null); } - public static Number round(Number v, Number s) { - return BinaryMathOperation.ROUND.apply(v, s); + public static Integer locate(String s1, String s2, Integer pos) { + return (Integer) LocateFunctionProcessor.doProcess(s1, s2, pos); } - public static Number truncate(Number v, Number s) { - return BinaryMathOperation.TRUNCATE.apply(v, s); + public static String ltrim(String s) { + return (String) StringOperation.LTRIM.apply(s); + } + + public static Integer octetLength(String s) { + return (Integer) StringOperation.OCTET_LENGTH.apply(s); + } + + public static Integer position(String s1, String s2) { + return (Integer) BinaryStringStringOperation.POSITION.apply(s1, s2); + } + + public static String repeat(String s, int count) { + return BinaryStringNumericOperation.REPEAT.apply(s, count); + } + + public static String replace(String s1, String s2, String s3) { + return ReplaceFunctionProcessor.doProcess(s1, s2, s3).toString(); + } + + public static String right(String s, int count) { + return BinaryStringNumericOperation.RIGHT.apply(s, count); + } + + public static String rtrim(String s) { + return (String) StringOperation.RTRIM.apply(s); + } + + public static String space(Number n) { + return (String) StringOperation.SPACE.apply(n); + } + + public static String substring(String s, int start, int length) { + return SubstringFunctionProcessor.doProcess(s, start, length).toString(); + } + + public static String ucase(String s) 
{ + return (String) StringOperation.UCASE.apply(s); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 640008aae3c..0c738bcc1e9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -291,7 +291,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder { } return new Neg(source(ctx.operator), value); default: - throw new ParsingException(loc, "Unknown arithemtic {}", ctx.operator.getText()); + throw new ParsingException(loc, "Unknown arithmetic {}", ctx.operator.getText()); } } @@ -314,7 +314,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder { case SqlBaseParser.MINUS: return new Sub(loc, left, right); default: - throw new ParsingException(loc, "Unknown arithemtic {}", ctx.operator.getText()); + throw new ParsingException(loc, "Unknown arithmetic {}", ctx.operator.getText()); } } diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index 1bb2802a5db..f8b15b202d8 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -16,8 +16,8 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS Number truncate(Number, Number) Integer ascii(String) Integer bitLength(String) - Integer charLength(String) String character(Number) + Integer charLength(String) String concat(String, String) String insert(String, int, int, String) String lcase(String) @@ -26,6 +26,7 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS Integer locate(String, String) Integer locate(String, String, Integer) String ltrim(String) + Integer octetLength(String) Integer position(String, String) String repeat(String, int) String replace(String, String, String) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java index 0ca75ee05d9..030350c39a6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.TimeZone; @@ -34,7 +33,7 @@ import static org.mockito.Mockito.mock; public class FunctionRegistryTests extends ESTestCase { public void testNoArgFunction() { UnresolvedFunction ur = uf(STANDARD); - FunctionRegistry r = new FunctionRegistry(Collections.singletonList(def(DummyFunction.class, DummyFunction::new))); + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, DummyFunction::new)); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); @@ -51,11 +50,10 @@ public class FunctionRegistryTests extends 
ESTestCase { public void testUnaryFunction() { UnresolvedFunction ur = uf(STANDARD, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(Collections.singletonList( - def(DummyFunction.class, (Location l, Expression e) -> { + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e) -> { assertSame(e, ur.children().get(0)); return new DummyFunction(l); - }))); + })); FunctionDefinition def = r.resolveFunction(ur.name()); assertFalse(def.datetime()); assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); @@ -79,12 +77,11 @@ public class FunctionRegistryTests extends ESTestCase { public void testUnaryDistinctAwareFunction() { boolean urIsDistinct = randomBoolean(); UnresolvedFunction ur = uf(urIsDistinct ? DISTINCT : STANDARD, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(Collections.singletonList( - def(DummyFunction.class, (Location l, Expression e, boolean distinct) -> { - assertEquals(urIsDistinct, distinct); - assertSame(e, ur.children().get(0)); - return new DummyFunction(l); - }))); + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e, boolean distinct) -> { + assertEquals(urIsDistinct, distinct); + assertSame(e, ur.children().get(0)); + return new DummyFunction(l); + })); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); assertFalse(def.datetime()); @@ -104,12 +101,11 @@ public class FunctionRegistryTests extends ESTestCase { boolean urIsExtract = randomBoolean(); UnresolvedFunction ur = uf(urIsExtract ? EXTRACT : STANDARD, mock(Expression.class)); TimeZone providedTimeZone = randomTimeZone(); - FunctionRegistry r = new FunctionRegistry(Collections.singletonList( - def(DummyFunction.class, (Location l, Expression e, TimeZone tz) -> { - assertEquals(providedTimeZone, tz); - assertSame(e, ur.children().get(0)); - return new DummyFunction(l); - }))); + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e, TimeZone tz) -> { + assertEquals(providedTimeZone, tz); + assertSame(e, ur.children().get(0)); + return new DummyFunction(l); + })); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(providedTimeZone, def).location()); assertTrue(def.datetime()); @@ -132,12 +128,11 @@ public class FunctionRegistryTests extends ESTestCase { public void testBinaryFunction() { UnresolvedFunction ur = uf(STANDARD, mock(Expression.class), mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(Collections.singletonList( - def(DummyFunction.class, (Location l, Expression lhs, Expression rhs) -> { - assertSame(lhs, ur.children().get(0)); - assertSame(rhs, ur.children().get(1)); - return new DummyFunction(l); - }))); + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression lhs, Expression rhs) -> { + assertSame(lhs, ur.children().get(0)); + assertSame(rhs, ur.children().get(1)); + return new DummyFunction(l); + })); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); assertFalse(def.datetime()); @@ -163,14 +158,34 @@ public class FunctionRegistryTests extends ESTestCase { .buildResolved(randomTimeZone(), def)); assertThat(e.getMessage(), endsWith("expects exactly two arguments")); } + + public void testAliasNameIsTheSameAsAFunctionName() { + FunctionRegistry r = new 
FunctionRegistry(def(DummyFunction.class, DummyFunction::new, "ALIAS")); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + r.addToMap(def(DummyFunction2.class, DummyFunction2::new, "DUMMY_FUNCTION"))); + assertEquals(iae.getMessage(), "alias [DUMMY_FUNCTION] is used by [DUMMY_FUNCTION] and [DUMMY_FUNCTION2]"); + } + + public void testDuplicateAliasInTwoDifferentFunctionsFromTheSameBatch() { + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + new FunctionRegistry(def(DummyFunction.class, DummyFunction::new, "ALIAS"), + def(DummyFunction2.class, DummyFunction2::new, "ALIAS"))); + assertEquals(iae.getMessage(), "alias [ALIAS] is used by [DUMMY_FUNCTION(ALIAS)] and [DUMMY_FUNCTION2]"); + } + + public void testDuplicateAliasInTwoDifferentFunctionsFromTwoDifferentBatches() { + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, DummyFunction::new, "ALIAS")); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + r.addToMap(def(DummyFunction2.class, DummyFunction2::new, "ALIAS"))); + assertEquals(iae.getMessage(), "alias [ALIAS] is used by [DUMMY_FUNCTION] and [DUMMY_FUNCTION2]"); + } public void testFunctionResolving() { UnresolvedFunction ur = uf(STANDARD, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry( - Collections.singletonList(def(DummyFunction.class, (Location l, Expression e) -> { + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e) -> { assertSame(e, ur.children().get(0)); return new DummyFunction(l); - }, "DUMMY_FUNC"))); + }, "DUMMY_FUNC")); // Resolve by primary name FunctionDefinition def = r.resolveFunction(r.resolveAlias("DuMMy_FuncTIon")); @@ -241,4 +256,10 @@ public class FunctionRegistryTests extends ESTestCase { return null; } } + + public static class DummyFunction2 extends DummyFunction { + public DummyFunction2(Location location) { + super(location); + } + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java index d3336ec89a8..b43f1bae51a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java @@ -202,4 +202,17 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCas stringCharInputValidation(proc); } + + public void testOctetLength() { + StringProcessor proc = new StringProcessor(StringOperation.OCTET_LENGTH); + assertNull(proc.process(null)); + assertEquals(7, proc.process("foo bar")); + assertEquals(0, proc.process("")); + assertEquals(1, proc.process('f')); + assertEquals(3, proc.process('\u20ac')); // euro symbol + // euro (3), lamda (2), theta (2), 'white sun with rays' (3), math 'A' (4) symbols + assertEquals(14, proc.process("\u20ac\u039B\u03F4\u263C\u1D400")); + + stringCharInputValidation(proc); + } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_job.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_job.json index 77eb89c00f9..f93fff6eaab 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_job.json +++ 
b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_job.json @@ -15,8 +15,13 @@ "params": { "force": { "type": "boolean", - "required": false, - "description": "True if the job should be forcefully deleted" + "description": "True if the job should be forcefully deleted", + "default": false + }, + "wait_for_completion": { + "type": "boolean", + "description": "Should this request wait until the operation has completed before returning", + "default": true } } }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml index a0f79b7caba..bf2f3bcec1c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml @@ -172,7 +172,7 @@ setup: "job_id":"datafeeds-crud-1", "indexes":["index-foo"], "types":["type-bar"], - "query":{"match_all_mispelled":{}} + "query":{"match_all_misspelled":{}} } --- diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml index 6a0414fe9dd..549305579ed 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml @@ -36,6 +36,9 @@ - match: { mappings.sourcetype.type: keyword } - match: { mappings.time.type: date } - match: { mappings.time.format: epoch_second } + - match: { ingest_pipeline.description: "Ingest pipeline created by file structure finder" } + - match: { ingest_pipeline.processors.0.date.field: time } + - match: { ingest_pipeline.processors.0.date.formats.0: UNIX } - match: { field_stats.airline.count: 3 } - match: { field_stats.airline.cardinality: 2 } - match: { field_stats.responsetime.count: 3 } @@ -93,6 +96,9 @@ - match: { mappings.sourcetype.type: keyword } - match: { mappings.time.type: date } - match: { mappings.time.format: epoch_second } + - match: { ingest_pipeline.description: "Ingest pipeline created by file structure finder" } + - match: { ingest_pipeline.processors.0.date.field: time } + - match: { ingest_pipeline.processors.0.date.formats.0: UNIX } - match: { field_stats.airline.count: 3 } - match: { field_stats.airline.cardinality: 2 } - match: { field_stats.responsetime.count: 3 } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml index b1d4158ac15..9f065bb5522 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml @@ -34,7 +34,7 @@ setup: # read the monitoring indices. 
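# (Illustrative sketch, not part of this patch: the delete_job.json change above
# adds a `wait_for_completion` flag that defaults to true. A REST test driving
# the new asynchronous path might look like the following; the job id is made up,
# and the `task` field in the response is assumed from the usual convention for
# async task APIs.)
#
#  - do:
#      xpack.ml.delete_job:
#        job_id: "async-delete-job"
#        wait_for_completion: false
#  - is_true: task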
- do: xpack.security.put_role: - name: "unkown_agent_role" + name: "unknown_agent_role" body: > { "cluster": ["monitor"], @@ -51,7 +51,7 @@ setup: body: > { "password": "s3krit", - "roles" : [ "unkown_agent_role" ] + "roles" : [ "unknown_agent_role" ] } --- @@ -70,7 +70,7 @@ teardown: ignore: 404 - do: xpack.security.delete_role: - name: "unkown_agent_role" + name: "unknown_agent_role" ignore: 404 --- diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml index 298cf27fa2f..e5c7c762340 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml @@ -177,7 +177,7 @@ setup: - is_true: acknowledged --- -"Test delete non-existant job": +"Test delete non-existent job": - do: catch: /the task with id does_not_exist doesn't exist/ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml index 7adba9035eb..38a357bcd68 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml @@ -37,7 +37,7 @@ setup: } --- -"Test start non-existant job": +"Test start nonexistent job": - do: catch: /Task for Rollup Job \[does_not_exist\] not found/ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml index 42a1dea8163..849aca3332d 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml @@ -37,7 +37,7 @@ setup: } --- -"Test stop non-existant job": +"Test stop nonexistent job": - do: catch: /Task for Rollup Job \[does_not_exist\] not found/ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml index 0e42a13b8fd..24ea7c03c80 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml @@ -91,7 +91,7 @@ teardown: ignore: 404 --- -"Test shared index seperating user by using DLS": +"Test shared index separating user by using DLS": - do: headers: Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/ack_watch/30_reset_ack_after_unmet_condition.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/ack_watch/30_reset_ack_after_unmet_condition.yml index 5b2d00235c5..2f3a8153464 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/ack_watch/30_reset_ack_after_unmet_condition.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/ack_watch/30_reset_ack_after_unmet_condition.yml @@ -12,7 +12,7 @@ teardown: ignore: 404 --- -"Ensure that ack status is reset after unsuccesful execution": +"Ensure that ack status is reset after unsuccessful execution": - do: xpack.watcher.put_watch: diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index 
74eb029e916..56a17674b49 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -47,7 +47,7 @@ public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { public void testIndexUpgradeInfoLicense() throws Exception { // This test disables all licenses and generates a new one using dev private key - // in non-snapshot builds we are using produciton public key for license verification + // in non-snapshot builds we are using the production public key for license verification // which makes this test fail assumeTrue("License is only valid when tested against snapshot/test keys", Build.CURRENT.isSnapshot()); assertAcked(client().admin().indices().prepareCreate("test").get()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index eaf64e6ef8f..5eaaa5e2ed5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -311,32 +311,32 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa final ConditionRegistry conditionRegistry = new ConditionRegistry(Collections.unmodifiableMap(parsers), getClock()); final Map<String, TransformFactory> transformFactories = new HashMap<>(); - transformFactories.put(ScriptTransform.TYPE, new ScriptTransformFactory(settings, scriptService)); + transformFactories.put(ScriptTransform.TYPE, new ScriptTransformFactory(scriptService)); transformFactories.put(SearchTransform.TYPE, new SearchTransformFactory(settings, client, xContentRegistry, scriptService)); final TransformRegistry transformRegistry = new TransformRegistry(Collections.unmodifiableMap(transformFactories)); // actions final Map<String, ActionFactory> actionFactoryMap = new HashMap<>(); actionFactoryMap.put(EmailAction.TYPE, new EmailActionFactory(settings, emailService, templateEngine, emailAttachmentsParser)); - actionFactoryMap.put(WebhookAction.TYPE, new WebhookActionFactory(settings, httpClient, templateEngine)); + actionFactoryMap.put(WebhookAction.TYPE, new WebhookActionFactory(httpClient, templateEngine)); actionFactoryMap.put(IndexAction.TYPE, new IndexActionFactory(settings, client)); actionFactoryMap.put(LoggingAction.TYPE, new LoggingActionFactory(templateEngine)); - actionFactoryMap.put(HipChatAction.TYPE, new HipChatActionFactory(settings, templateEngine, hipChatService)); - actionFactoryMap.put(JiraAction.TYPE, new JiraActionFactory(settings, templateEngine, jiraService)); - actionFactoryMap.put(SlackAction.TYPE, new SlackActionFactory(settings, templateEngine, slackService)); - actionFactoryMap.put(PagerDutyAction.TYPE, new PagerDutyActionFactory(settings, templateEngine, pagerDutyService)); + actionFactoryMap.put(HipChatAction.TYPE, new HipChatActionFactory(templateEngine, hipChatService)); + actionFactoryMap.put(JiraAction.TYPE, new JiraActionFactory(templateEngine, jiraService)); + actionFactoryMap.put(SlackAction.TYPE, new SlackActionFactory(templateEngine, slackService)); + actionFactoryMap.put(PagerDutyAction.TYPE, new PagerDutyActionFactory(templateEngine, pagerDutyService)); final ActionRegistry registry = new ActionRegistry(actionFactoryMap, conditionRegistry, transformRegistry, getClock(), getLicenseState()); // inputs final Map<String, InputFactory> inputFactories = new HashMap<>(); inputFactories.put(SearchInput.TYPE, new
SearchInputFactory(settings, client, xContentRegistry, scriptService)); - inputFactories.put(SimpleInput.TYPE, new SimpleInputFactory(settings)); + inputFactories.put(SimpleInput.TYPE, new SimpleInputFactory()); inputFactories.put(HttpInput.TYPE, new HttpInputFactory(settings, httpClient, templateEngine)); - inputFactories.put(NoneInput.TYPE, new NoneInputFactory(settings)); - inputFactories.put(TransformInput.TYPE, new TransformInputFactory(settings, transformRegistry)); - final InputRegistry inputRegistry = new InputRegistry(settings, inputFactories); - inputFactories.put(ChainInput.TYPE, new ChainInputFactory(settings, inputRegistry)); + inputFactories.put(NoneInput.TYPE, new NoneInputFactory()); + inputFactories.put(TransformInput.TYPE, new TransformInputFactory(transformRegistry)); + final InputRegistry inputRegistry = new InputRegistry(inputFactories); + inputFactories.put(ChainInput.TYPE, new ChainInputFactory(inputRegistry)); bulkProcessor = BulkProcessor.builder(ClientHelper.clientWithOrigin(client, WATCHER_ORIGIN), new BulkProcessor.Listener() { @Override @@ -438,7 +438,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa } protected Consumer<Iterable<TriggerEvent>> getTriggerEngineListener(ExecutionService executionService) { - return new AsyncTriggerEventConsumer(settings, executionService); + return new AsyncTriggerEventConsumer(executionService); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index f3b77b922aa..086528054bc 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -143,7 +143,7 @@ final class WatcherIndexingListener extends AbstractComponent implements Indexin * * @param shardId The shard id object of the document being processed * @param index The index operation - * @param ex The exception occured during indexing + * @param ex The exception that occurred during indexing */ @Override public void postIndex(ShardId shardId, Engine.Index index, Exception ex) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionFactory.java index c2dae2855fb..44ad7952d9c 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionFactory.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.watcher.actions.email; -import org.elasticsearch.common.logging.Loggers; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.actions.ActionFactory; @@ -25,7 +25,7 @@ public class EmailActionFactory extends ActionFactory { public EmailActionFactory(Settings settings, EmailService emailService, TextTemplateEngine templateEngine, EmailAttachmentsParser emailAttachmentsParser) { - super(Loggers.getLogger(ExecutableEmailAction.class, settings)); + super(LogManager.getLogger(ExecutableEmailAction.class)); this.emailService = emailService; this.templateEngine = templateEngine; this.htmlSanitizer = new
HtmlSanitizer(settings); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionFactory.java index 081dc2e331b..6b9f053d1db 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionFactory.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.xpack.watcher.actions.hipchat; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.actions.ActionFactory; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; @@ -20,8 +19,8 @@ public class HipChatActionFactory extends ActionFactory { private final TextTemplateEngine templateEngine; private final HipChatService hipchatService; - public HipChatActionFactory(Settings settings, TextTemplateEngine templateEngine, HipChatService hipchatService) { - super(Loggers.getLogger(ExecutableHipChatAction.class, settings)); + public HipChatActionFactory(TextTemplateEngine templateEngine, HipChatService hipchatService) { + super(LogManager.getLogger(ExecutableHipChatAction.class)); this.templateEngine = templateEngine; this.hipchatService = hipchatService; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionFactory.java index 7f9b13a6de9..29e6160cdd4 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionFactory.java @@ -5,8 +5,8 @@ */ package org.elasticsearch.xpack.watcher.actions.index; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.Client; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; @@ -21,7 +21,7 @@ public class IndexActionFactory extends ActionFactory { private final TimeValue bulkDefaultTimeout; public IndexActionFactory(Settings settings, Client client) { - super(Loggers.getLogger(IndexActionFactory.class, settings)); + super(LogManager.getLogger(IndexActionFactory.class)); this.client = client; this.indexDefaultTimeout = settings.getAsTime("xpack.watcher.actions.index.default_timeout", TimeValue.timeValueSeconds(30)); this.bulkDefaultTimeout = settings.getAsTime("xpack.watcher.actions.bulk.default_timeout", TimeValue.timeValueMinutes(1)); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionFactory.java index 3d2184283a5..ac95f7fe8a4 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionFactory.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.xpack.watcher.actions.jira; -import 
org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.actions.ActionFactory; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; @@ -19,8 +18,8 @@ public class JiraActionFactory extends ActionFactory { private final TextTemplateEngine templateEngine; private final JiraService jiraService; - public JiraActionFactory(Settings settings, TextTemplateEngine templateEngine, JiraService jiraService) { - super(Loggers.getLogger(ExecutableJiraAction.class, settings)); + public JiraActionFactory(TextTemplateEngine templateEngine, JiraService jiraService) { + super(LogManager.getLogger(ExecutableJiraAction.class)); this.templateEngine = templateEngine; this.jiraService = jiraService; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionFactory.java index 5cd18af3af6..703c9eaac4d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionFactory.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.xpack.watcher.actions.pagerduty; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.actions.ActionFactory; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; @@ -19,8 +18,8 @@ public class PagerDutyActionFactory extends ActionFactory { private final TextTemplateEngine templateEngine; private final PagerDutyService pagerDutyService; - public PagerDutyActionFactory(Settings settings, TextTemplateEngine templateEngine, PagerDutyService pagerDutyService) { - super(Loggers.getLogger(ExecutablePagerDutyAction.class, settings)); + public PagerDutyActionFactory(TextTemplateEngine templateEngine, PagerDutyService pagerDutyService) { + super(LogManager.getLogger(ExecutablePagerDutyAction.class)); this.templateEngine = templateEngine; this.pagerDutyService = pagerDutyService; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionFactory.java index 8392976f273..45d94b894f1 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionFactory.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.xpack.watcher.actions.slack; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.actions.ActionFactory; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; @@ -18,8 +17,8 @@ public class SlackActionFactory extends ActionFactory { private final TextTemplateEngine templateEngine; private final SlackService slackService; - public 
SlackActionFactory(Settings settings, TextTemplateEngine templateEngine, SlackService slackService) { - super(Loggers.getLogger(ExecutableSlackAction.class, settings)); + public SlackActionFactory(TextTemplateEngine templateEngine, SlackService slackService) { + super(LogManager.getLogger(ExecutableSlackAction.class)); this.templateEngine = templateEngine; this.slackService = slackService; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionFactory.java index 5a7a1069932..a600acd09bf 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionFactory.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.xpack.watcher.actions.webhook; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.actions.ActionFactory; import org.elasticsearch.xpack.watcher.common.http.HttpClient; @@ -19,9 +18,8 @@ public class WebhookActionFactory extends ActionFactory { private final HttpClient httpClient; private final TextTemplateEngine templateEngine; - public WebhookActionFactory(Settings settings, HttpClient httpClient, TextTemplateEngine templateEngine) { - - super(Loggers.getLogger(ExecutableWebhookAction.class, settings)); + public WebhookActionFactory(HttpClient httpClient, TextTemplateEngine templateEngine) { + super(LogManager.getLogger(ExecutableWebhookAction.class)); this.httpClient = httpClient; this.templateEngine = templateEngine; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/AsyncTriggerEventConsumer.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/AsyncTriggerEventConsumer.java index 61a34e554d2..bd48534589e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/AsyncTriggerEventConsumer.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/AsyncTriggerEventConsumer.java @@ -6,10 +6,9 @@ package org.elasticsearch.xpack.watcher.execution; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; import java.util.function.Consumer; @@ -17,12 +16,10 @@ import static java.util.stream.StreamSupport.stream; public class AsyncTriggerEventConsumer implements Consumer<Iterable<TriggerEvent>> { - - private final Logger logger; + private static final Logger logger = LogManager.getLogger(AsyncTriggerEventConsumer.class); private final ExecutionService executionService; - public AsyncTriggerEventConsumer(Settings settings, ExecutionService executionService) { - this.logger = Loggers.getLogger(SyncTriggerEventConsumer.class, settings); + public AsyncTriggerEventConsumer(ExecutionService executionService) { this.executionService = executionService; } diff --git
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java index 3507bd4eb36..7a0b7f14bca 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java @@ -122,7 +122,7 @@ public class ExecutionService extends AbstractComponent { /** * Pause the execution of the watcher executor, and empty the state. - * Pausing means, that no new watch executions will be done unless this pausing is explicitely unset. + * Pausing means that no new watch executions will be done unless this pausing is explicitly unset. * This is important when watcher is stopped, so that scheduled watches do not accidentally get executed. * This should not be used when we need to reload watcher based on some cluster state changes, then just calling * {@link #clearExecutionsAndQueue()} is the way to go @@ -338,7 +338,7 @@ public class ExecutionService extends AbstractComponent { public void updateWatchStatus(Watch watch) throws IOException { // at the moment we store the status together with the watch, // so we just need to update the watch itself - // we do not want to update the status.state field, as it might have been deactivated inbetween + // we do not want to update the status.state field, as it might have been deactivated in between Map<String, String> parameters = MapBuilder.<String, String>newMapBuilder() .put(Watch.INCLUDE_STATUS_KEY, "true") .put(WatchStatus.INCLUDE_STATE, "false") diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/SyncTriggerEventConsumer.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/SyncTriggerEventConsumer.java index 7608ad2908f..957e988cfdc 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/SyncTriggerEventConsumer.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/SyncTriggerEventConsumer.java @@ -6,10 +6,9 @@ package org.elasticsearch.xpack.watcher.execution; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; import java.util.function.Consumer; @@ -17,12 +16,11 @@ import static java.util.stream.StreamSupport.stream; public class SyncTriggerEventConsumer implements Consumer<Iterable<TriggerEvent>> { + private static final Logger logger = LogManager.getLogger(SyncTriggerEventConsumer.class); private final ExecutionService executionService; - private final Logger logger; - public SyncTriggerEventConsumer(Settings settings, ExecutionService executionService) { - this.logger = Loggers.getLogger(SyncTriggerEventConsumer.class, settings); + public SyncTriggerEventConsumer(ExecutionService executionService) { this.executionService = executionService; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputFactory.java index 25deb2227c6..c154293b560 100644 ---
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputFactory.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.input; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.input.ExecutableInput; import org.elasticsearch.xpack.core.watcher.input.Input; @@ -16,13 +15,6 @@ import java.io.IOException; * Parses xcontent to a concrete input of the same type. */ public abstract class InputFactory<I extends Input, R extends Input.Result, E extends ExecutableInput<I, R>> { - - protected final Logger inputLogger; - - public InputFactory(Logger inputLogger) { - this.inputLogger = inputLogger; - } - /** * @return The type of the input */ diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java index 732653d8293..87bd8f3bdd5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.input; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.input.ExecutableInput; import org.elasticsearch.xpack.watcher.input.chain.ChainInput; @@ -21,9 +20,9 @@ public class InputRegistry { private final Map<String, InputFactory> factories; - public InputRegistry(Settings settings, Map<String, InputFactory> factories) { + public InputRegistry(Map<String, InputFactory> factories) { Map<String, InputFactory> map = new HashMap<>(factories); - map.put(ChainInput.TYPE, new ChainInputFactory(settings, this)); + map.put(ChainInput.TYPE, new ChainInputFactory(this)); this.factories = Collections.unmodifiableMap(map); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputFactory.java index ac3f7b820c2..06d49efcc65 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputFactory.java @@ -6,8 +6,6 @@ package org.elasticsearch.xpack.watcher.input.chain; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.input.ExecutableInput; import org.elasticsearch.xpack.core.watcher.input.Input; @@ -22,8 +20,7 @@ public class ChainInputFactory extends InputFactory(tuple.v1(), executableInput)); } - return new ExecutableChainInput(input, executableInputs, inputLogger); + return new ExecutableChainInput(input, executableInputs); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ExecutableChainInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ExecutableChainInput.java index 2643876bdb6..9b959e7c017 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ExecutableChainInput.java +++
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ExecutableChainInput.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.input.chain; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.core.watcher.input.ExecutableInput; @@ -20,11 +21,12 @@ import java.util.Map; import static org.elasticsearch.xpack.watcher.input.chain.ChainInput.TYPE; public class ExecutableChainInput extends ExecutableInput<ChainInput, ChainInput.Result> { + private static final Logger logger = LogManager.getLogger(ExecutableChainInput.class); private List<Tuple<String, ExecutableInput>> inputs; - public ExecutableChainInput(ChainInput input, List<Tuple<String, ExecutableInput>> inputs, Logger logger) { - super(input, logger); + public ExecutableChainInput(ChainInput input, List<Tuple<String, ExecutableInput>> inputs) { + super(input); this.inputs = inputs; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java index 5d738772f21..79d3918f7a2 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.input.http; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -31,12 +32,13 @@ import java.util.Map; import static org.elasticsearch.xpack.watcher.input.http.HttpInput.TYPE; public class ExecutableHttpInput extends ExecutableInput<HttpInput, HttpInput.Result> { + private static final Logger logger = LogManager.getLogger(ExecutableHttpInput.class); private final HttpClient client; private final TextTemplateEngine templateEngine; - public ExecutableHttpInput(HttpInput input, Logger logger, HttpClient client, TextTemplateEngine templateEngine) { - super(input, logger); + public ExecutableHttpInput(HttpInput input, HttpClient client, TextTemplateEngine templateEngine) { + super(input); this.client = client; this.templateEngine = templateEngine; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/HttpInputFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/HttpInputFactory.java index 7a68a77ddd2..f50c08ffe40 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/HttpInputFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/HttpInputFactory.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.input.http; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.common.http.HttpClient; @@ -20,7 +19,6 @@ public final class HttpInputFactory extends InputFactory { - public ExecutableNoneInput(Logger logger) { - super(NoneInput.INSTANCE, logger); + public ExecutableNoneInput() { + super(NoneInput.INSTANCE); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/none/NoneInputFactory.java
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/none/NoneInputFactory.java index b44d788ae56..370d7360c4a 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/none/NoneInputFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/none/NoneInputFactory.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.watcher.input.none; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.input.none.NoneInput; import org.elasticsearch.xpack.watcher.input.InputFactory; @@ -14,11 +12,6 @@ import org.elasticsearch.xpack.watcher.input.InputFactory; import java.io.IOException; public class NoneInputFactory extends InputFactory { - - public NoneInputFactory(Settings settings) { - super(Loggers.getLogger(ExecutableNoneInput.class, settings)); - } - @Override public String type() { return NoneInput.TYPE; @@ -31,6 +24,6 @@ public class NoneInputFactory extends InputFactory { - public ExecutableSimpleInput(SimpleInput input, Logger logger) { - super(input, logger); + public ExecutableSimpleInput(SimpleInput input) { + super(input); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/simple/SimpleInputFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/simple/SimpleInputFactory.java index 91089f165dd..e431167d791 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/simple/SimpleInputFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/simple/SimpleInputFactory.java @@ -5,19 +5,12 @@ */ package org.elasticsearch.xpack.watcher.input.simple; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.input.InputFactory; import java.io.IOException; public class SimpleInputFactory extends InputFactory { - - public SimpleInputFactory(Settings settings) { - super(Loggers.getLogger(ExecutableSimpleInput.class, settings)); - } - @Override public String type() { return SimpleInput.TYPE; @@ -30,6 +23,6 @@ public class SimpleInputFactory extends InputFactory(HipChatService.getSettings()))); - factory = new HipChatActionFactory(Settings.EMPTY, mock(TextTemplateEngine.class), hipchatService); + factory = new HipChatActionFactory(mock(TextTemplateEngine.class), hipchatService); HipChatAction action = hipchatAction("_unknown", "_body").build(); XContentBuilder jsonBuilder = jsonBuilder().value(action); XContentParser parser = createParser(jsonBuilder); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionFactoryTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionFactoryTests.java index e48dab8b36d..cef91c69358 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionFactoryTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionFactoryTests.java @@ -31,7 +31,7 @@ public class PagerDutyActionFactoryTests extends ESTestCase { @Before public void init() throws Exception { service = mock(PagerDutyService.class); - factory = new 
PagerDutyActionFactory(Settings.EMPTY, mock(TextTemplateEngine.class), service); + factory = new PagerDutyActionFactory(mock(TextTemplateEngine.class), service); } public void testParseAction() throws Exception { @@ -49,7 +49,7 @@ public class PagerDutyActionFactoryTests extends ESTestCase { } public void testParseActionUnknownAccount() throws Exception { - factory = new PagerDutyActionFactory(Settings.EMPTY, mock(TextTemplateEngine.class), new PagerDutyService(Settings.EMPTY, null, + factory = new PagerDutyActionFactory(mock(TextTemplateEngine.class), new PagerDutyService(Settings.EMPTY, null, new ClusterSettings(Settings.EMPTY, new HashSet<>(PagerDutyService.getSettings())))); PagerDutyAction action = triggerPagerDutyAction("_unknown", "_body").build(); XContentBuilder jsonBuilder = jsonBuilder().value(action); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionFactoryTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionFactoryTests.java index 3b3e7fa981b..031f451c79a 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionFactoryTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionFactoryTests.java @@ -31,7 +31,7 @@ public class SlackActionFactoryTests extends ESTestCase { @Before public void init() throws Exception { service = mock(SlackService.class); - factory = new SlackActionFactory(Settings.EMPTY, mock(TextTemplateEngine.class), service); + factory = new SlackActionFactory(mock(TextTemplateEngine.class), service); } public void testParseAction() throws Exception { @@ -50,7 +50,7 @@ public class SlackActionFactoryTests extends ESTestCase { public void testParseActionUnknownAccount() throws Exception { SlackService service = new SlackService(Settings.EMPTY, null, new ClusterSettings(Settings.EMPTY, new HashSet<>(SlackService.getSettings()))); - factory = new SlackActionFactory(Settings.EMPTY, mock(TextTemplateEngine.class), service); + factory = new SlackActionFactory(mock(TextTemplateEngine.class), service); SlackAction action = slackAction("_unknown", createRandomTemplate()).build(); XContentBuilder jsonBuilder = jsonBuilder().value(action); XContentParser parser = createParser(jsonBuilder); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionTests.java index 29eaece9037..ce7ed9fdbde 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionTests.java @@ -117,7 +117,7 @@ public class SlackActionTests extends ESTestCase { hasError = true; break; case 1: - when(response.status()).thenReturn(randomIntBetween(300, 600)); // error reponse + when(response.status()).thenReturn(randomIntBetween(300, 600)); // error response messages.add(SentMessages.SentMessage.responded(randomAlphaOfLength(10), message, request, response)); hasError = true; break; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java index 755e3dffe5c..511fcd7698e 100644 --- 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java @@ -208,7 +208,7 @@ public class WebhookActionTests extends ESTestCase { } private WebhookActionFactory webhookFactory(HttpClient client) { - return new WebhookActionFactory(Settings.EMPTY, client, templateEngine); + return new WebhookActionFactory(client, templateEngine); } public void testThatSelectingProxyWorks() throws Exception { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index 0ba733ea4c5..4e62eedd221 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -58,7 +58,7 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest public void startWebservice() throws Exception { Settings settings = getInstanceFromMaster(Settings.class); TestsSSLService sslService = new TestsSSLService(settings, getInstanceFromMaster(Environment.class)); - webServer = new MockWebServer(sslService.sslContext(settings.getByPrefix("xpack.http.ssl.")), false); + webServer = new MockWebServer(sslService.sslContext("xpack.http.ssl"), false); webServer.start(); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java index a8330743c0a..519dbbeee86 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java @@ -522,7 +522,7 @@ public class HttpClientTests extends ESTestCase { }); HttpRequest request = HttpRequest.builder("localhost", serverSocket.getLocalPort()).path("/").build(); expectThrows(ClientProtocolException.class, () -> httpClient.execute(request)); - assertThat("A server side exception occured, but shouldn't", hasExceptionHappened.get(), is(nullValue())); + assertThat("A server side exception occurred, but shouldn't have", hasExceptionHappened.get(), is(nullValue())); } finally { terminate(executor); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java index d3f46d3d452..069dca8f2b1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java @@ -1015,7 +1015,7 @@ public class ExecutionServiceTests extends ESTestCase { public void testUpdateWatchStatusDoesNotUpdateState() throws Exception { WatchStatus status = new WatchStatus(DateTime.now(UTC), Collections.emptyMap()); - Watch watch = new Watch("_id", new ManualTrigger(), new ExecutableNoneInput(logger), InternalAlwaysCondition.INSTANCE, null, null, + Watch watch = new Watch("_id", new ManualTrigger(), new ExecutableNoneInput(),
InternalAlwaysCondition.INSTANCE, null, null, Collections.emptyList(), null, status, 1L); final AtomicBoolean assertionsTriggered = new AtomicBoolean(false); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/InputRegistryTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/InputRegistryTests.java index 83c3457c4f6..8347140a561 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/InputRegistryTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/InputRegistryTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.input; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; @@ -17,7 +16,7 @@ import static org.hamcrest.Matchers.containsString; public class InputRegistryTests extends ESTestCase { public void testParseEmptyInput() throws Exception { - InputRegistry registry = new InputRegistry(Settings.EMPTY, emptyMap()); + InputRegistry registry = new InputRegistry(emptyMap()); XContentParser parser = createParser(jsonBuilder().startObject().endObject()); parser.nextToken(); try { @@ -29,7 +28,7 @@ } public void testParseArrayInput() throws Exception { - InputRegistry registry = new InputRegistry(Settings.EMPTY, emptyMap()); + InputRegistry registry = new InputRegistry(emptyMap()); XContentParser parser = createParser(jsonBuilder().startArray().endArray()); parser.nextToken(); try { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java index 2e1e7858f16..278a2c08d7b 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -63,11 +62,11 @@ public class ChainInputTests extends ESTestCase { */ public void testThatExecutionWorks() throws Exception { Map<String, InputFactory> factories = new HashMap<>(); - factories.put("simple", new SimpleInputFactory(Settings.EMPTY)); + factories.put("simple", new SimpleInputFactory()); // hackedy hack...
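// (Context for the "hackedy hack" above: ChainInputFactory needs an InputRegistry to
// resolve the inputs nested inside a chain, while a registry's factory map in turn
// needs a chain factory, a circular dependency. InputRegistry's constructor, shown
// earlier in this diff, breaks the cycle by registering `new ChainInputFactory(this)`
// on itself; this test wires up its own ChainInputFactory afterwards so it can call
// createExecutable(...) on it directly.)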
- InputRegistry inputRegistry = new InputRegistry(Settings.EMPTY, factories); - ChainInputFactory chainInputFactory = new ChainInputFactory(Settings.EMPTY, inputRegistry); + InputRegistry inputRegistry = new InputRegistry(factories); + ChainInputFactory chainInputFactory = new ChainInputFactory(inputRegistry); factories.put("chain", chainInputFactory); XContentBuilder builder = jsonBuilder().startObject().startArray("inputs") @@ -88,7 +87,7 @@ public class ChainInputTests extends ESTestCase { // now execute ExecutableChainInput executableChainInput = chainInputFactory.createExecutable(chainInput); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); ChainInput.Result result = executableChainInput.execute(ctx, new Payload.Simple()); Payload payload = result.payload(); assertThat(payload.data(), hasKey("first")); @@ -117,10 +116,10 @@ public class ChainInputTests extends ESTestCase { // parsing it back as well! Map<String, InputFactory> factories = new HashMap<>(); - factories.put("simple", new SimpleInputFactory(Settings.EMPTY)); + factories.put("simple", new SimpleInputFactory()); - InputRegistry inputRegistry = new InputRegistry(Settings.EMPTY, factories); - ChainInputFactory chainInputFactory = new ChainInputFactory(Settings.EMPTY, inputRegistry); + InputRegistry inputRegistry = new InputRegistry(factories); + ChainInputFactory chainInputFactory = new ChainInputFactory(inputRegistry); factories.put("chain", chainInputFactory); XContentParser parser = createParser(builder); @@ -177,10 +176,10 @@ public class ChainInputTests extends ESTestCase { */ public void testParsingShouldBeStrictWhenClosingInputs() throws Exception { Map<String, InputFactory> factories = new HashMap<>(); - factories.put("simple", new SimpleInputFactory(Settings.EMPTY)); + factories.put("simple", new SimpleInputFactory()); - InputRegistry inputRegistry = new InputRegistry(Settings.EMPTY, factories); - ChainInputFactory chainInputFactory = new ChainInputFactory(Settings.EMPTY, inputRegistry); + InputRegistry inputRegistry = new InputRegistry(factories); + ChainInputFactory chainInputFactory = new ChainInputFactory(inputRegistry); factories.put("chain", chainInputFactory); XContentBuilder builder = jsonBuilder().startObject().startArray("inputs").startObject() @@ -206,10 +205,10 @@ public class ChainInputTests extends ESTestCase { */ public void testParsingShouldBeStrictWhenStartingInputs() throws Exception { Map<String, InputFactory> factories = new HashMap<>(); - factories.put("simple", new SimpleInputFactory(Settings.EMPTY)); + factories.put("simple", new SimpleInputFactory()); - InputRegistry inputRegistry = new InputRegistry(Settings.EMPTY, factories); - ChainInputFactory chainInputFactory = new ChainInputFactory(Settings.EMPTY, inputRegistry); + InputRegistry inputRegistry = new InputRegistry(factories); + ChainInputFactory chainInputFactory = new ChainInputFactory(inputRegistry); factories.put("chain", chainInputFactory); XContentBuilder builder = jsonBuilder().startObject().startArray("inputs") diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ExecutableChainInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ExecutableChainInputTests.java index 0ae6e0b3c96..03b9b1d993b 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ExecutableChainInputTests.java +++
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ExecutableChainInputTests.java @@ -32,7 +32,7 @@ public class ExecutableChainInputTests extends ESTestCase { ChainInput chainInput = new ChainInput(Arrays.asList(new Tuple<>("whatever", new SimpleInput(Payload.EMPTY)))); Tuple tuple = new Tuple<>("whatever", new FailingExecutableInput()); - ExecutableChainInput executableChainInput = new ExecutableChainInput(chainInput, Arrays.asList(tuple), logger); + ExecutableChainInput executableChainInput = new ExecutableChainInput(chainInput, Arrays.asList(tuple)); ChainInput.Result result = executableChainInput.execute(ctx, Payload.EMPTY); assertThat(result.status(), is(Status.SUCCESS)); } @@ -40,7 +40,7 @@ public class ExecutableChainInputTests extends ESTestCase { private class FailingExecutableInput extends ExecutableInput { protected FailingExecutableInput() { - super(new SimpleInput(Payload.EMPTY), ExecutableChainInputTests.this.logger); + super(new SimpleInput(Payload.EMPTY)); } @Override diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java index 7d81ff2203d..c29b3e1f8b4 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java @@ -113,11 +113,11 @@ public class HttpInputTests extends ESTestCase { break; } - ExecutableHttpInput input = new ExecutableHttpInput(httpInput, logger, httpClient, templateEngine); + ExecutableHttpInput input = new ExecutableHttpInput(httpInput, httpClient, templateEngine); when(httpClient.execute(any(HttpRequest.class))).thenReturn(response); when(templateEngine.render(eq(new TextTemplate("_body")), any(Map.class))).thenReturn("_body"); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, new Payload.Simple()); assertThat(result.type(), equalTo(HttpInput.TYPE)); assertThat(result.payload().data(), hasEntry("key", "value")); @@ -130,13 +130,13 @@ public class HttpInputTests extends ESTestCase { .method(HttpMethod.POST) .body("_body"); HttpInput httpInput = InputBuilders.httpInput(request.build()).expectedResponseXContentType(HttpContentType.TEXT).build(); - ExecutableHttpInput input = new ExecutableHttpInput(httpInput, logger, httpClient, templateEngine); + ExecutableHttpInput input = new ExecutableHttpInput(httpInput, httpClient, templateEngine); String notJson = "This is not json"; HttpResponse response = new HttpResponse(123, notJson.getBytes(StandardCharsets.UTF_8)); when(httpClient.execute(any(HttpRequest.class))).thenReturn(response); when(templateEngine.render(eq(new TextTemplate("_body")), any(Map.class))).thenReturn("_body"); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, new Payload.Simple()); assertThat(result.type(), equalTo(HttpInput.TYPE)); assertThat(result.payload().data().get("_value").toString(), equalTo(notJson)); @@ -232,7 +232,7 @@ public class HttpInputTests extends ESTestCase { HttpRequestTemplate.Builder request = HttpRequestTemplate.builder("localhost", 8080); HttpInput httpInput = 
InputBuilders.httpInput(request.build()).build(); - ExecutableHttpInput input = new ExecutableHttpInput(httpInput, logger, httpClient, templateEngine); + ExecutableHttpInput input = new ExecutableHttpInput(httpInput, httpClient, templateEngine); Map responseHeaders = new HashMap<>(); responseHeaders.put(headerName, new String[] { headerValue }); @@ -248,7 +248,7 @@ public class HttpInputTests extends ESTestCase { when(templateEngine.render(eq(new TextTemplate("_body")), any(Map.class))).thenReturn("_body"); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, new Payload.Simple()); assertThat(result.type(), equalTo(HttpInput.TYPE)); @@ -264,7 +264,7 @@ public class HttpInputTests extends ESTestCase { public void testThatExpectedContentTypeOverridesReturnedContentType() throws Exception { HttpRequestTemplate template = HttpRequestTemplate.builder("http:://127.0.0.1:12345").build(); HttpInput httpInput = new HttpInput(template, HttpContentType.TEXT, null); - ExecutableHttpInput input = new ExecutableHttpInput(httpInput, logger, httpClient, templateEngine); + ExecutableHttpInput input = new ExecutableHttpInput(httpInput, httpClient, templateEngine); Map headers = new HashMap<>(1); String contentType = randomFrom("application/json", "application/json; charset=UTF-8", "text/html", "application/yaml", @@ -274,7 +274,7 @@ public class HttpInputTests extends ESTestCase { HttpResponse httpResponse = new HttpResponse(200, body, headers); when(httpClient.execute(any())).thenReturn(httpResponse); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, Payload.EMPTY); assertThat(result.payload().data(), hasEntry("_value", body)); assertThat(result.payload().data(), not(hasKey("foo"))); @@ -286,9 +286,9 @@ public class HttpInputTests extends ESTestCase { HttpRequestTemplate.Builder request = HttpRequestTemplate.builder("localhost", 8080); HttpInput httpInput = InputBuilders.httpInput(request.build()).build(); - ExecutableHttpInput input = new ExecutableHttpInput(httpInput, logger, httpClient, templateEngine); + ExecutableHttpInput input = new ExecutableHttpInput(httpInput, httpClient, templateEngine); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, new Payload.Simple()); assertThat(result.statusCode, is(200)); assertThat(result.payload().data(), hasKey("_status_code")); @@ -303,9 +303,9 @@ public class HttpInputTests extends ESTestCase { HttpRequestTemplate.Builder request = HttpRequestTemplate.builder("localhost", 8080); HttpInput httpInput = InputBuilders.httpInput(request.build()).build(); - ExecutableHttpInput input = new ExecutableHttpInput(httpInput, logger, httpClient, templateEngine); + ExecutableHttpInput input = new ExecutableHttpInput(httpInput, httpClient, templateEngine); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, new Payload.Simple()); assertThat(result.statusCode, is(200)); assertThat(result.payload().data(), not(hasKey("_value"))); @@ -322,9 +322,9 @@ public class 
HttpInputTests extends ESTestCase { HttpRequestTemplate.Builder request = HttpRequestTemplate.builder("localhost", 8080); HttpInput httpInput = InputBuilders.httpInput(request.build()).build(); - ExecutableHttpInput input = new ExecutableHttpInput(httpInput, logger, httpClient, templateEngine); + ExecutableHttpInput input = new ExecutableHttpInput(httpInput, httpClient, templateEngine); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, new Payload.Simple()); assertThat(result.getException(), is(notNullValue())); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/simple/SimpleInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/simple/SimpleInputTests.java index b435e7ba38a..d690cfe15f3 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/simple/SimpleInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/simple/SimpleInputTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.input.simple; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; @@ -29,7 +28,7 @@ public class SimpleInputTests extends ESTestCase { Map data = new HashMap<>(); data.put("foo", "bar"); data.put("baz", new ArrayList() ); - ExecutableInput staticInput = new ExecutableSimpleInput(new SimpleInput(new Payload.Simple(data)), logger); + ExecutableInput staticInput = new ExecutableSimpleInput(new SimpleInput(new Payload.Simple(data))); Input.Result staticResult = staticInput.execute(null, new Payload.Simple()); assertEquals(staticResult.payload().data().get("foo"), "bar"); @@ -43,7 +42,7 @@ public class SimpleInputTests extends ESTestCase { data.put("baz", new ArrayList()); XContentBuilder jsonBuilder = jsonBuilder().map(data); - InputFactory parser = new SimpleInputFactory(Settings.builder().build()); + InputFactory parser = new SimpleInputFactory(); XContentParser xContentParser = createParser(jsonBuilder); xContentParser.nextToken(); ExecutableInput input = parser.parseExecutable("_id", xContentParser); @@ -59,7 +58,7 @@ public class SimpleInputTests extends ESTestCase { public void testParserInvalid() throws Exception { XContentBuilder jsonBuilder = jsonBuilder().value("just a string"); - InputFactory parser = new SimpleInputFactory(Settings.builder().build()); + InputFactory parser = new SimpleInputFactory(); XContentParser xContentParser = createParser(jsonBuilder); xContentParser.nextToken(); try { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/transform/TransformInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/transform/TransformInputTests.java index 0ac5932586e..72f473a611d 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/transform/TransformInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/transform/TransformInputTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.input.transform; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -52,7 +51,7 @@ public class TransformInputTests extends ESTestCase { TransformInput transformInput = new TransformInput(scriptTransform); ExecutableTransform executableTransform = new ExecutableScriptTransform(scriptTransform, logger, scriptService); - ExecutableInput input = new ExecutableTransformInput(transformInput, logger, executableTransform); + ExecutableInput input = new ExecutableTransformInput(transformInput, executableTransform); WatchExecutionContext ctx = WatcherTestUtils.mockExecutionContext("_id", Payload.EMPTY); Input.Result result = input.execute(ctx, new Payload.Simple()); @@ -62,9 +61,9 @@ public class TransformInputTests extends ESTestCase { public void testParserValid() throws Exception { Map transformFactories = Collections.singletonMap("script", - new ScriptTransformFactory(Settings.EMPTY, scriptService)); + new ScriptTransformFactory(scriptService)); TransformRegistry registry = new TransformRegistry(transformFactories); - TransformInputFactory factory = new TransformInputFactory(Settings.EMPTY, registry); + TransformInputFactory factory = new TransformInputFactory(registry); // { "script" : { "lang" : "mockscript", "source" : "1" } } XContentBuilder builder = jsonBuilder().startObject().startObject("script") @@ -86,9 +85,9 @@ public class TransformInputTests extends ESTestCase { XContentBuilder jsonBuilder = jsonBuilder().value("just a string"); Map transformFactories = Collections.singletonMap("script", - new ScriptTransformFactory(Settings.EMPTY, scriptService)); + new ScriptTransformFactory(scriptService)); TransformRegistry registry = new TransformRegistry(transformFactories); - TransformInputFactory factory = new TransformInputFactory(Settings.EMPTY, registry); + TransformInputFactory factory = new TransformInputFactory(registry); XContentParser parser = createParser(jsonBuilder); parser.nextToken(); @@ -105,9 +104,9 @@ public class TransformInputTests extends ESTestCase { public void testTransformInputToXContentIsSameAsParsing() throws Exception { Map transformFactories = Collections.singletonMap("script", - new ScriptTransformFactory(Settings.EMPTY, scriptService)); + new ScriptTransformFactory(scriptService)); TransformRegistry registry = new TransformRegistry(transformFactories); - TransformInputFactory factory = new TransformInputFactory(Settings.EMPTY, registry); + TransformInputFactory factory = new TransformInputFactory(registry); XContentBuilder jsonBuilder = jsonBuilder().startObject().startObject("script") .field("source", "1") diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/TimeWarpedWatcher.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/TimeWarpedWatcher.java index b1c263299f7..99cf45e583d 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/TimeWarpedWatcher.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/TimeWarpedWatcher.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.watcher.test; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; @@ -30,13 +30,13 @@ import 
java.util.function.Consumer; import java.util.stream.Stream; public class TimeWarpedWatcher extends LocalStateCompositeXPackPlugin { + private static final Logger logger = LogManager.getLogger(TimeWarpedWatcher.class); // use a single clock across all nodes using this plugin, this lets us keep it static private static final ClockMock clock = new ClockMock(); public TimeWarpedWatcher(final Settings settings, final Path configPath) throws Exception { super(settings, configPath); - Logger logger = Loggers.getLogger(TimeWarpedWatcher.class, settings); logger.info("using time warped watchers plugin"); TimeWarpedWatcher thisVar = this; @@ -69,7 +69,7 @@ public class TimeWarpedWatcher extends LocalStateCompositeXPackPlugin { @Override protected Consumer<Iterable<TriggerEvent>> getTriggerEngineListener(ExecutionService executionService){ - return new SyncTriggerEventConsumer(settings, executionService); + return new SyncTriggerEventConsumer(executionService); } }); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java index 75c6a908b99..bb5a6eabdd5 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java @@ -125,10 +125,10 @@ public final class WatcherTestUtils { .buildMock(); } - public static WatchExecutionContext createWatchExecutionContext(Logger logger) throws Exception { + public static WatchExecutionContext createWatchExecutionContext() throws Exception { Watch watch = new Watch("test-watch", new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))), - new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger), + new ExecutableSimpleInput(new SimpleInput(new Payload.Simple())), InternalAlwaysCondition.INSTANCE, null, null, @@ -175,7 +175,7 @@ public final class WatcherTestUtils { return new Watch( watchName, new ScheduleTrigger(new CronSchedule("0/5 * * * * ?
*")), - new ExecutableSimpleInput(new SimpleInput(new Payload.Simple(Collections.singletonMap("bar", "foo"))), logger), + new ExecutableSimpleInput(new SimpleInput(new Payload.Simple(Collections.singletonMap("bar", "foo")))), InternalAlwaysCondition.INSTANCE, new ExecutableSearchTransform(searchTransform, logger, client, searchTemplateService, TimeValue.timeValueMinutes(1)), new TimeValue(0), diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/ScheduleEngineTriggerBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/ScheduleEngineTriggerBenchmark.java index c1967d9e8ce..d1b8963950d 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/ScheduleEngineTriggerBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/ScheduleEngineTriggerBenchmark.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.watcher.test.bench; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; @@ -33,9 +31,6 @@ import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interva @SuppressForbidden(reason = "benchmark") public class ScheduleEngineTriggerBenchmark { - - private static final Logger logger = ESLoggerFactory.getLogger(ScheduleEngineTriggerBenchmark.class); - public static void main(String[] args) throws Exception { int numWatches = 1000; int interval = 2; @@ -61,7 +56,7 @@ public class ScheduleEngineTriggerBenchmark { .build(); List watches = new ArrayList<>(numWatches); for (int i = 0; i < numWatches; i++) { - watches.add(new Watch("job_" + i, new ScheduleTrigger(interval(interval + "s")), new ExecutableNoneInput(logger), + watches.add(new Watch("job_" + i, new ScheduleTrigger(interval(interval + "s")), new ExecutableNoneInput(), InternalAlwaysCondition.INSTANCE, null, null, Collections.emptyList(), null, null, 1L)); } ScheduleRegistry scheduleRegistry = new ScheduleRegistry(emptySet()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java index 4ca412c5ce0..80e802cf817 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java @@ -5,8 +5,8 @@ */ package org.elasticsearch.xpack.watcher.test.bench; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.Client; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.node.MockNode; @@ -207,7 +207,7 @@ public class WatcherExecutorServiceBenchmark { public BenchmarkWatcher(Settings settings) { super(settings); - Loggers.getLogger(BenchmarkWatcher.class, settings).info("using watcher benchmark plugin"); + LogManager.getLogger(BenchmarkWatcher.class).info("using watcher benchmark plugin"); } @Override diff --git 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java index b66a70c23af..45b85caacc0 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java @@ -101,9 +101,9 @@ public class SearchInputTests extends ESTestCase { SearchSourceBuilder searchSourceBuilder = searchSource().query(boolQuery().must(matchQuery("event_type", "a"))); WatcherSearchTemplateRequest request = WatcherTestUtils.templateRequest(searchSourceBuilder); - ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), logger, + ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), client, watcherSearchTemplateService(), TimeValue.timeValueMinutes(1)); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); SearchInput.Result result = searchInput.execute(ctx, new Payload.Simple()); @@ -127,9 +127,9 @@ public class SearchInputTests extends ESTestCase { SearchType searchType = getRandomSupportedSearchType(); WatcherSearchTemplateRequest request = WatcherTestUtils.templateRequest(searchSourceBuilder, searchType); - ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), logger, + ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), client, watcherSearchTemplateService(), TimeValue.timeValueMinutes(1)); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); SearchInput.Result result = searchInput.execute(ctx, new Payload.Simple()); assertThat(result.status(), is(Input.Result.Status.SUCCESS)); @@ -179,7 +179,7 @@ public class SearchInputTests extends ESTestCase { assertThat(input.getRequest().getSearchSource(), is(BytesArray.EMPTY)); ExecutableSearchInput executableSearchInput = factory.createExecutable(input); - WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(logger); + WatchExecutionContext ctx = WatcherTestUtils.createWatchExecutionContext(); SearchInput.Result result = executableSearchInput.execute(ctx, Payload.Simple.EMPTY); assertThat(result.status(), is(Input.Result.Status.SUCCESS)); // no body in the search request diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java index 752b753f028..daaad496f69 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java @@ -133,7 +133,7 @@ public class ScriptTransformTests extends ESTestCase { XContentParser parser = createParser(builder); parser.nextToken(); - ExecutableScriptTransform transform = new ScriptTransformFactory(Settings.EMPTY, service).parseExecutable("_id", parser); + ExecutableScriptTransform transform = new 
ScriptTransformFactory(service).parseExecutable("_id", parser); Script script = new Script(type, type == ScriptType.STORED ? null : "_lang", "_script", singletonMap("key", "value")); assertThat(transform.transform().getScript(), equalTo(script)); } @@ -144,7 +144,7 @@ public class ScriptTransformTests extends ESTestCase { XContentParser parser = createParser(builder); parser.nextToken(); - ExecutableScriptTransform transform = new ScriptTransformFactory(Settings.EMPTY, service).parseExecutable("_id", parser); + ExecutableScriptTransform transform = new ScriptTransformFactory(service).parseExecutable("_id", parser); assertEquals(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "_script", emptyMap()), transform.transform().getScript()); } @@ -155,7 +155,7 @@ public class ScriptTransformTests extends ESTestCase { Collections.emptyList(), "whatever", "whatever"); when(scriptService.compile(anyObject(), eq(WatcherTransformScript.CONTEXT))).thenThrow(scriptException); - ScriptTransformFactory transformFactory = new ScriptTransformFactory(Settings.builder().build(), scriptService); + ScriptTransformFactory transformFactory = new ScriptTransformFactory(scriptService); XContentBuilder builder = jsonBuilder().startObject() .field(scriptTypeField(randomFrom(ScriptType.values())), "whatever") @@ -170,7 +170,7 @@ } public void testScriptConditionParserBadLang() throws Exception { - ScriptTransformFactory transformFactory = new ScriptTransformFactory(Settings.builder().build(), createScriptService()); + ScriptTransformFactory transformFactory = new ScriptTransformFactory(createScriptService()); String script = "return true"; XContentBuilder builder = jsonBuilder().startObject() .field(scriptTypeField(ScriptType.INLINE), script) diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java index 2f502cb95aa..065981a4260 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java @@ -83,7 +83,7 @@ public class DeleteWatchTests extends AbstractWatcherIntegrationTestCase { // watch has been executed successfully String state = ObjectPath.eval("state", source); assertThat(state, is("executed")); - // no exception occured + // no exception occurred assertThat(source, not(hasKey("exception"))); } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java index 53d5458706a..c743922a4d7 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java @@ -105,7 +105,7 @@ public class ExecuteWatchTests extends AbstractWatcherIntegrationTestCase { } if (mode.force()) { - // since we're forcing, lets ack the action, such that it'd suppoed to be throttled + // since we're forcing, let's ack the action, such that it's supposed to be throttled // but forcing will ignore the throttling // lets wait for the watch to be ackable
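
The hunks above and below all repeat one refactoring: Settings and Logger parameters are dropped from watcher constructors, and per-instance loggers built via Loggers.getLogger(SomeClass.class, settings) become static fields obtained straight from Log4j. A minimal before/after sketch of that pattern, using a hypothetical MyComponent class rather than any class touched by this patch:

---------------------------------------------------------------------------
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class MyComponent {
    // After: one static, settings-free logger per class.
    private static final Logger logger = LogManager.getLogger(MyComponent.class);

    // Before (sketch): the constructor accepted Settings largely so it could
    // build a per-instance logger via Loggers.getLogger(getClass(), settings);
    // with a static logger, neither Settings nor a Logger needs threading through.
    MyComponent() {
        logger.info("component created");
    }
}
---------------------------------------------------------------------------
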
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java index 331416bd690..4f0c78b1e0d 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.watcher.trigger; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; @@ -30,13 +30,12 @@ import java.util.concurrent.ConcurrentMap; * jobCount. */ public class ScheduleTriggerEngineMock extends ScheduleTriggerEngine { + private static final Logger logger = LogManager.getLogger(ScheduleTriggerEngineMock.class); - private final Logger logger; private final ConcurrentMap watches = new ConcurrentHashMap<>(); public ScheduleTriggerEngineMock(Settings settings, ScheduleRegistry scheduleRegistry, Clock clock) { super(settings, scheduleRegistry, clock); - this.logger = Loggers.getLogger(ScheduleTriggerEngineMock.class, settings); } @Override diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java index 4aa34738cef..f0d3e88b127 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java @@ -144,7 +144,7 @@ public class TriggerServiceTests extends ESTestCase { } private void setInput(Watch watch) { - ExecutableNoneInput noneInput = new ExecutableNoneInput(logger); + ExecutableNoneInput noneInput = new ExecutableNoneInput(); when(watch.input()).thenReturn(noneInput); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java index db1d3767b59..5fd5ad5b16f 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java @@ -255,7 +255,7 @@ public class TickerScheduleEngineTests extends ESTestCase { } private Watch createWatch(String name, Schedule schedule) { - return new Watch(name, new ScheduleTrigger(schedule), new ExecutableNoneInput(logger), + return new Watch(name, new ScheduleTrigger(schedule), new ExecutableNoneInput(), InternalAlwaysCondition.INSTANCE, null, null, Collections.emptyList(), null, null, Versions.MATCH_ANY); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java index fff07cfa010..2e09c7446db 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java +++ 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java @@ -298,7 +298,7 @@ public class WatchTests extends ESTestCase { TriggerService triggerService = new TriggerService(Settings.EMPTY, singleton(triggerEngine)); ConditionRegistry conditionRegistry = conditionRegistry(); - InputRegistry inputRegistry = registry(new ExecutableNoneInput(logger).type()); + InputRegistry inputRegistry = registry(new ExecutableNoneInput().type()); TransformRegistry transformRegistry = transformRegistry(); ActionRegistry actionRegistry = registry(Collections.emptyList(), conditionRegistry, transformRegistry); @@ -509,10 +509,10 @@ public class WatchTests extends ESTestCase { SearchInput searchInput = searchInput(WatcherTestUtils.templateRequest(searchSource(), "idx")) .timeout(randomBoolean() ? null : timeValueSeconds(between(1, 10000))) .build(); - return new ExecutableSearchInput(searchInput, logger, client, searchTemplateService, null); + return new ExecutableSearchInput(searchInput, client, searchTemplateService, null); default: SimpleInput simpleInput = InputBuilders.simpleInput(singletonMap("_key", "_val")).build(); - return new ExecutableSimpleInput(simpleInput, logger); + return new ExecutableSimpleInput(simpleInput); } } @@ -521,10 +521,10 @@ public class WatchTests extends ESTestCase { switch (inputType) { case SearchInput.TYPE: parsers.put(SearchInput.TYPE, new SearchInputFactory(settings, client, xContentRegistry(), scriptService)); - return new InputRegistry(Settings.EMPTY, parsers); + return new InputRegistry(parsers); default: - parsers.put(SimpleInput.TYPE, new SimpleInputFactory(settings)); - return new InputRegistry(Settings.EMPTY, parsers); + parsers.put(SimpleInput.TYPE, new SimpleInputFactory()); + return new InputRegistry(parsers); } } @@ -568,7 +568,7 @@ public class WatchTests extends ESTestCase { private TransformRegistry transformRegistry() { Map factories = new HashMap<>(); - factories.put(ScriptTransform.TYPE, new ScriptTransformFactory(settings, scriptService)); + factories.put(ScriptTransform.TYPE, new ScriptTransformFactory(scriptService)); factories.put(SearchTransform.TYPE, new SearchTransformFactory(settings, client, xContentRegistry(), scriptService)); return new TransformRegistry(unmodifiableMap(factories)); } @@ -618,7 +618,7 @@ public class WatchTests extends ESTestCase { parsers.put(IndexAction.TYPE, new IndexActionFactory(settings, client)); break; case WebhookAction.TYPE: - parsers.put(WebhookAction.TYPE, new WebhookActionFactory(settings, httpClient, templateEngine)); + parsers.put(WebhookAction.TYPE, new WebhookActionFactory(httpClient, templateEngine)); break; case LoggingAction.TYPE: parsers.put(LoggingAction.TYPE, new LoggingActionFactory(new MockTextTemplateEngine())); diff --git a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java index 7fcca3d9f58..57182f534c2 100644 --- a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java +++ b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.security.authc.kerberos; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.SuppressForbidden; -import 
org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.xpack.security.authc.kerberos.KerberosTicketValidator; import org.ietf.jgss.GSSContext; @@ -50,7 +50,7 @@ import javax.security.auth.login.LoginException; * {@link GSSContext} after usage. */ class SpnegoClient implements AutoCloseable { - private static final Logger LOGGER = ESLoggerFactory.getLogger(SpnegoClient.class); + private static final Logger LOGGER = LogManager.getLogger(SpnegoClient.class); public static final String CRED_CONF_NAME = "PasswordConf"; private static final String SUN_KRB5_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule"; diff --git a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/20_small_users_one_index.yml b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/20_small_users_one_index.yml index a015a88a315..ff5fad0e82d 100644 --- a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/20_small_users_one_index.yml +++ b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/20_small_users_one_index.yml @@ -62,7 +62,7 @@ teardown: ignore: 404 --- -"Test shared index seperating user by using DLS role query with user's username": +"Test shared index separating user by using DLS role query with user's username": - do: xpack.security.put_role: name: "small_companies_role" @@ -130,7 +130,7 @@ teardown: - match: { hits.hits.0._source.user.username: john} --- -"Test shared index seperating user by using DLS role query with user's metadata": +"Test shared index separating user by using DLS role query with user's metadata": - do: xpack.security.put_role: name: "small_companies_role" diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java index e1c94ce463f..51440cc68dd 100644 --- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java +++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java @@ -213,7 +213,7 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase { /** * Test the hijacking a scroll fails. This test is only implemented for * REST because it is the only API where it is simple to hijack a scroll. - * It should excercise the same code as the other APIs but if we were truly + * It should exercise the same code as the other APIs but if we were truly * paranoid we'd hack together something to test the others as well. */ public void testHijackScrollFails() throws Exception { diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java index 1bea73b56a2..c9076e38a0d 100644 --- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java +++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java @@ -264,7 +264,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase { createAuditLogAsserter() .expectSqlCompositeAction("test_admin", "test") /* Scrolling doesn't have to access the index again, at least not through sql. - * If we asserted query and scroll logs then we would see the scoll. 
*/ + * If we asserted query and scroll logs then we would see the scroll. */ .expect(true, SQL_ACTION_NAME, "test_admin", empty()) .expect(true, SQL_ACTION_NAME, "test_admin", empty()) .expectSqlCompositeAction("only_a", "test") diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java index f0697f553ae..b4e87d3e207 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java @@ -59,15 +59,15 @@ public abstract class ShowTestCase extends CliIntegrationTestCase { public void testShowFunctionsLikeInfix() throws IOException { assertThat(command("SHOW FUNCTIONS LIKE '%DAY%'"), RegexMatcher.matches("\\s*name\\s*\\|\\s*type\\s*")); assertThat(readLine(), containsString("----------")); - assertThat(readLine(), RegexMatcher.matches("\\s*DAY_NAME\\s*\\|\\s*SCALAR\\s*")); - assertThat(readLine(), RegexMatcher.matches("\\s*DAYNAME\\s*\\|\\s*SCALAR\\s*")); - assertThat(readLine(), RegexMatcher.matches("\\s*DAY_OF_MONTH\\s*\\|\\s*SCALAR\\s*")); - assertThat(readLine(), RegexMatcher.matches("\\s*DAYOFMONTH\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*DAY\\s*\\|\\s*SCALAR\\s*")); - assertThat(readLine(), RegexMatcher.matches("\\s*DAY_OF_WEEK\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAYNAME\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAYOFMONTH\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*DAYOFWEEK\\s*\\|\\s*SCALAR\\s*")); - assertThat(readLine(), RegexMatcher.matches("\\s*DAY_OF_YEAR\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*DAYOFYEAR\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAY_NAME\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAY_OF_MONTH\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAY_OF_WEEK\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAY_OF_YEAR\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*HOUR_OF_DAY\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*MINUTE_OF_DAY\\s*\\|\\s*SCALAR\\s*")); assertEquals("", readLine()); diff --git a/x-pack/qa/sql/src/main/resources/command.csv-spec b/x-pack/qa/sql/src/main/resources/command.csv-spec index 26e94b445c7..d1fa73c272e 100644 --- a/x-pack/qa/sql/src/main/resources/command.csv-spec +++ b/x-pack/qa/sql/src/main/resources/command.csv-spec @@ -12,40 +12,40 @@ COUNT |AGGREGATE MAX |AGGREGATE MIN |AGGREGATE SUM |AGGREGATE -STDDEV_POP |AGGREGATE -VAR_POP |AGGREGATE +KURTOSIS |AGGREGATE PERCENTILE |AGGREGATE PERCENTILE_RANK |AGGREGATE -SUM_OF_SQUARES |AGGREGATE SKEWNESS |AGGREGATE -KURTOSIS |AGGREGATE -DAY_NAME |SCALAR -DAYNAME |SCALAR -DAY_OF_MONTH |SCALAR -DAYOFMONTH |SCALAR +STDDEV_POP |AGGREGATE +SUM_OF_SQUARES |AGGREGATE +VAR_POP |AGGREGATE DAY |SCALAR -DOM |SCALAR -DAY_OF_WEEK |SCALAR +DAYNAME |SCALAR +DAYOFMONTH |SCALAR DAYOFWEEK |SCALAR -DOW |SCALAR -DAY_OF_YEAR |SCALAR DAYOFYEAR |SCALAR +DAY_NAME |SCALAR +DAY_OF_MONTH |SCALAR +DAY_OF_WEEK |SCALAR +DAY_OF_YEAR |SCALAR +DOM |SCALAR +DOW |SCALAR DOY |SCALAR -HOUR_OF_DAY |SCALAR HOUR |SCALAR +HOUR_OF_DAY |SCALAR +MINUTE |SCALAR MINUTE_OF_DAY |SCALAR MINUTE_OF_HOUR |SCALAR -MINUTE |SCALAR -MONTH_NAME |SCALAR -MONTHNAME |SCALAR -MONTH_OF_YEAR 
|SCALAR MONTH |SCALAR -SECOND_OF_MINUTE|SCALAR -SECOND |SCALAR +MONTHNAME |SCALAR +MONTH_NAME |SCALAR +MONTH_OF_YEAR |SCALAR QUARTER |SCALAR -YEAR |SCALAR +SECOND |SCALAR +SECOND_OF_MINUTE|SCALAR +WEEK |SCALAR WEEK_OF_YEAR |SCALAR -WEEK |SCALAR +YEAR |SCALAR ABS |SCALAR ACOS |SCALAR ASIN |SCALAR @@ -68,8 +68,8 @@ MOD |SCALAR PI |SCALAR POWER |SCALAR RADIANS |SCALAR -RANDOM |SCALAR RAND |SCALAR +RANDOM |SCALAR ROUND |SCALAR SIGN |SCALAR SIGNUM |SCALAR @@ -81,21 +81,22 @@ TRUNCATE |SCALAR ASCII |SCALAR BIT_LENGTH |SCALAR CHAR |SCALAR -CHAR_LENGTH |SCALAR CHARACTER_LENGTH|SCALAR -CONCAT |SCALAR -INSERT |SCALAR +CHAR_LENGTH |SCALAR +CONCAT |SCALAR +INSERT |SCALAR LCASE |SCALAR LEFT |SCALAR LENGTH |SCALAR LOCATE |SCALAR LTRIM |SCALAR +OCTET_LENGTH |SCALAR POSITION |SCALAR REPEAT |SCALAR REPLACE |SCALAR -RIGHT |SCALAR +RIGHT |SCALAR RTRIM |SCALAR -SPACE |SCALAR +SPACE |SCALAR SUBSTRING |SCALAR UCASE |SCALAR SCORE |SCORE @@ -134,15 +135,15 @@ showFunctionsWithLeadingPattern SHOW FUNCTIONS LIKE '%DAY%'; name:s | type:s -DAY_NAME |SCALAR -DAYNAME |SCALAR -DAY_OF_MONTH |SCALAR -DAYOFMONTH |SCALAR -DAY |SCALAR -DAY_OF_WEEK |SCALAR -DAYOFWEEK |SCALAR -DAY_OF_YEAR |SCALAR -DAYOFYEAR |SCALAR +DAY |SCALAR +DAYNAME |SCALAR +DAYOFMONTH |SCALAR +DAYOFWEEK |SCALAR +DAYOFYEAR |SCALAR +DAY_NAME |SCALAR +DAY_OF_MONTH |SCALAR +DAY_OF_WEEK |SCALAR +DAY_OF_YEAR |SCALAR HOUR_OF_DAY |SCALAR MINUTE_OF_DAY |SCALAR ; diff --git a/x-pack/qa/sql/src/main/resources/docs.csv-spec b/x-pack/qa/sql/src/main/resources/docs.csv-spec index 570bbb052f0..49e67cf5f36 100644 --- a/x-pack/qa/sql/src/main/resources/docs.csv-spec +++ b/x-pack/qa/sql/src/main/resources/docs.csv-spec @@ -188,40 +188,40 @@ COUNT |AGGREGATE MAX |AGGREGATE MIN |AGGREGATE SUM |AGGREGATE -STDDEV_POP |AGGREGATE -VAR_POP |AGGREGATE +KURTOSIS |AGGREGATE PERCENTILE |AGGREGATE PERCENTILE_RANK |AGGREGATE -SUM_OF_SQUARES |AGGREGATE SKEWNESS |AGGREGATE -KURTOSIS |AGGREGATE -DAY_NAME |SCALAR -DAYNAME |SCALAR -DAY_OF_MONTH |SCALAR -DAYOFMONTH |SCALAR +STDDEV_POP |AGGREGATE +SUM_OF_SQUARES |AGGREGATE +VAR_POP |AGGREGATE DAY |SCALAR -DOM |SCALAR -DAY_OF_WEEK |SCALAR +DAYNAME |SCALAR +DAYOFMONTH |SCALAR DAYOFWEEK |SCALAR -DOW |SCALAR -DAY_OF_YEAR |SCALAR DAYOFYEAR |SCALAR +DAY_NAME |SCALAR +DAY_OF_MONTH |SCALAR +DAY_OF_WEEK |SCALAR +DAY_OF_YEAR |SCALAR +DOM |SCALAR +DOW |SCALAR DOY |SCALAR -HOUR_OF_DAY |SCALAR HOUR |SCALAR +HOUR_OF_DAY |SCALAR +MINUTE |SCALAR MINUTE_OF_DAY |SCALAR MINUTE_OF_HOUR |SCALAR -MINUTE |SCALAR -MONTH_NAME |SCALAR -MONTHNAME |SCALAR -MONTH_OF_YEAR |SCALAR MONTH |SCALAR -SECOND_OF_MINUTE|SCALAR -SECOND |SCALAR +MONTHNAME |SCALAR +MONTH_NAME |SCALAR +MONTH_OF_YEAR |SCALAR QUARTER |SCALAR -YEAR |SCALAR +SECOND |SCALAR +SECOND_OF_MINUTE|SCALAR +WEEK |SCALAR WEEK_OF_YEAR |SCALAR -WEEK |SCALAR +YEAR |SCALAR ABS |SCALAR ACOS |SCALAR ASIN |SCALAR @@ -244,8 +244,8 @@ MOD |SCALAR PI |SCALAR POWER |SCALAR RADIANS |SCALAR -RANDOM |SCALAR RAND |SCALAR +RANDOM |SCALAR ROUND |SCALAR SIGN |SCALAR SIGNUM |SCALAR @@ -257,24 +257,25 @@ TRUNCATE |SCALAR ASCII |SCALAR BIT_LENGTH |SCALAR CHAR |SCALAR -CHAR_LENGTH |SCALAR CHARACTER_LENGTH|SCALAR -CONCAT |SCALAR -INSERT |SCALAR +CHAR_LENGTH |SCALAR +CONCAT |SCALAR +INSERT |SCALAR LCASE |SCALAR LEFT |SCALAR LENGTH |SCALAR LOCATE |SCALAR LTRIM |SCALAR +OCTET_LENGTH |SCALAR POSITION |SCALAR REPEAT |SCALAR REPLACE |SCALAR -RIGHT |SCALAR +RIGHT |SCALAR RTRIM |SCALAR -SPACE |SCALAR +SPACE |SCALAR SUBSTRING |SCALAR UCASE |SCALAR -SCORE |SCORE +SCORE |SCORE // end::showFunctions ; @@ -322,15 +323,15 @@ SHOW FUNCTIONS LIKE 
'%DAY%'; name | type ---------------+--------------- -DAY_NAME |SCALAR -DAYNAME |SCALAR -DAY_OF_MONTH |SCALAR -DAYOFMONTH |SCALAR -DAY |SCALAR -DAY_OF_WEEK |SCALAR -DAYOFWEEK |SCALAR -DAY_OF_YEAR |SCALAR -DAYOFYEAR |SCALAR +DAY |SCALAR +DAYNAME |SCALAR +DAYOFMONTH |SCALAR +DAYOFWEEK |SCALAR +DAYOFYEAR |SCALAR +DAY_NAME |SCALAR +DAY_OF_MONTH |SCALAR +DAY_OF_WEEK |SCALAR +DAY_OF_YEAR |SCALAR HOUR_OF_DAY |SCALAR MINUTE_OF_DAY |SCALAR @@ -1007,6 +1008,16 @@ Elastic // end::stringLTrim ; +stringOctetLength +// tag::stringOctetLength +SELECT OCTET_LENGTH('Elastic'); + +OCTET_LENGTH(Elastic) +------------------- +7 +// end::stringOctetLength +; + stringPosition // tag::stringPosition SELECT POSITION('Elastic', 'Elasticsearch'); @@ -1342,6 +1353,16 @@ SELECT RADIANS(90), PI()/2; // end::mathInlineRadians ; +mathRandom +// tag::mathRandom +SELECT RANDOM(123); + + RANDOM(123) +------------------ +0.7231742029971469 +// end::mathRandom +; + mathRoundWithNegativeParameter // tag::mathRoundWithNegativeParameter SELECT ROUND(-345.153, -1) AS rounded; diff --git a/x-pack/qa/sql/src/main/resources/functions.csv-spec b/x-pack/qa/sql/src/main/resources/functions.csv-spec index b5e98c26440..b0c9323005f 100644 --- a/x-pack/qa/sql/src/main/resources/functions.csv-spec +++ b/x-pack/qa/sql/src/main/resources/functions.csv-spec @@ -366,6 +366,37 @@ bu |1 by |1 ; +octetLengthGroupByAndOrderBy +SELECT OCTET_LENGTH(first_name), COUNT(*) count FROM "test_emp" GROUP BY OCTET_LENGTH(first_name) ORDER BY OCTET_LENGTH(first_name) LIMIT 10; + +OCTET_LENGTH(first_name):i| count:l +3 |4 +4 |11 +5 |16 +6 |24 +7 |19 +8 |14 +9 |10 +10 |1 +11 |1 +; + +octetLengthOrderByFieldWithWhere +SELECT OCTET_LENGTH(first_name) len, first_name FROM "test_emp" WHERE OCTET_LENGTH(first_name) > 8 ORDER BY first_name LIMIT 10; + +len:i | first_name:s +10 |Adamantios +9 |Alejandro +9 |Alejandro +9 |Chirstian +9 |Cristinel +9 |Duangkaew +9 |Eberhardt +9 |Margareta +9 |Prasadram +11 |Sreekrishna +; + upperCasingTheSecondLetterFromTheRightFromFirstName SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f FROM "test_emp" ORDER BY "first_name" LIMIT 10;
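
The octetLength specs above rely on OCTET_LENGTH counting octets (bytes) rather than characters, so for the pure-ASCII first names in test_emp it coincides with the character length. A standalone sketch of that distinction in plain Java (not part of the patch), assuming UTF-8 as the encoding:

---------------------------------------------------------------------------
import java.nio.charset.StandardCharsets;

public class OctetLengthCheck {
    public static void main(String[] args) {
        // ASCII: 7 characters encode to 7 octets, matching OCTET_LENGTH('Elastic') = 7.
        System.out.println("Elastic".getBytes(StandardCharsets.UTF_8).length); // 7
        // Multi-byte UTF-8: one character, two octets, so the two lengths diverge.
        System.out.println("é".getBytes(StandardCharsets.UTF_8).length); // 2
    }
}
---------------------------------------------------------------------------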