Merge branch '7.x' into enrich-7.x

James Baiera 2019-07-25 13:12:56 -04:00
commit c5528a25e6
2848 changed files with 49849 additions and 25836 deletions

.ci/bwcVersions Normal file

@ -0,0 +1,41 @@
BWC_VERSION:
- "6.0.0"
- "6.0.1"
- "6.1.0"
- "6.1.1"
- "6.1.2"
- "6.1.3"
- "6.1.4"
- "6.2.0"
- "6.2.1"
- "6.2.2"
- "6.2.3"
- "6.2.4"
- "6.3.0"
- "6.3.1"
- "6.3.2"
- "6.4.0"
- "6.4.1"
- "6.4.2"
- "6.4.3"
- "6.5.0"
- "6.5.1"
- "6.5.2"
- "6.5.3"
- "6.5.4"
- "6.6.0"
- "6.6.1"
- "6.6.2"
- "6.7.0"
- "6.7.1"
- "6.7.2"
- "6.8.0"
- "6.8.1"
- "6.8.2"
- "7.0.0"
- "7.0.1"
- "7.1.0"
- "7.1.1"
- "7.2.0"
- "7.2.1"
- "7.3.0"

.gitignore vendored

@ -41,3 +41,6 @@ html_docs
# random old stuff that we should look at the necessity of...
/tmp/
eclipse-build
# projects using testfixtures
testfixtures_shared/

@ -29,16 +29,13 @@ import org.gradle.util.DistributionLocator
import org.gradle.plugins.ide.eclipse.model.SourceFolder
plugins {
id 'com.gradle.build-scan' version '2.2.1'
id 'com.gradle.build-scan' version '2.3'
id 'base'
id 'elasticsearch.global-build-info'
}
if (properties.get("org.elasticsearch.acceptScanTOS", "false") == "true") {
buildScan {
termsOfServiceUrl = 'https://gradle.com/terms-of-service'
termsOfServiceAgree = 'yes'
}
}
apply plugin: 'nebula.info-scm'
apply from: 'gradle/build-scan.gradle'
// common maven publishing configuration
allprojects {
@ -49,7 +46,6 @@ allprojects {
BuildPlugin.configureRepositories(project)
apply plugin: 'nebula.info-scm'
String licenseCommit
if (VersionProperties.elasticsearch.toString().endsWith('-SNAPSHOT')) {
licenseCommit = scminfo.change ?: "master" // leniency for non git builds
@ -110,6 +106,17 @@ subprojects {
* *something* to test against. */
BwcVersions versions = new BwcVersions(file('server/src/main/java/org/elasticsearch/Version.java').readLines('UTF-8'))
task updateCIBwcVersions() {
doLast {
File yml = file(".ci/bwcVersions")
yml.text = ""
yml << "BWC_VERSION:\n"
versions.indexCompatible.each {
yml << " - \"$it\"\n"
}
}
}
// build metadata from previous build, contains eg hashes for bwc builds
String buildMetadataValue = System.getenv('BUILD_METADATA')
if (buildMetadataValue == null) {
@ -153,6 +160,12 @@ task verifyVersions {
.collect { Version.fromString(it) }
)
}
String ciYml = file(".ci/bwcVersions").text
bwcVersions.indexCompatible.each {
if (ciYml.contains("\"$it\"\n") == false) {
throw new Exception(".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results");
}
}
}
}
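The verification above requires every index-compatible version to appear quoted on its own line in .ci/bwcVersions. A standalone Java sketch of that check (an illustrative rephrasing, not the build's own code; the path and error message are taken from the snippet above):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

class BwcVersionsCheck {
    // Throws the same way the Gradle task does when a version is missing.
    static void verify(List<String> indexCompatible) throws IOException {
        String ciYml = new String(Files.readAllBytes(Paths.get(".ci/bwcVersions")));
        for (String version : indexCompatible) {
            if (ciYml.contains("\"" + version + "\"\n") == false) {
                throw new IllegalStateException(
                        ".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results");
            }
        }
    }
}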

@ -66,9 +66,23 @@ if (JavaVersion.current() < JavaVersion.VERSION_11) {
throw new GradleException('At least Java 11 is required to build elasticsearch gradle tools')
}
// Keep compatibility with Java 8 for external users of build-tools that haven't migrated to Java 11
targetCompatibility = '8'
sourceCompatibility = '8'
sourceSets {
// We have a few classes that need to be compiled for older java versions
minimumRuntime { }
}
compileMinimumRuntimeJava {
targetCompatibility = 8
sourceCompatibility = 8
}
jar {
from sourceSets.minimumRuntime.output
}
javadoc {
source sourceSets.minimumRuntime.allSource
}
/*****************************************************************************
* Dependencies used by the entire build *
@ -79,6 +93,11 @@ repositories {
}
dependencies {
if (project.ext.has("isEclipse") == false || project.ext.isEclipse == false) {
// eclipse is confused if this is set explicitly
compile sourceSets.minimumRuntime.output
}
compile localGroovy()
compile 'commons-codec:commons-codec:1.12'
@ -96,6 +115,9 @@ dependencies {
testCompile "junit:junit:${props.getProperty('junit')}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
testCompile 'com.github.tomakehurst:wiremock-jre8-standalone:2.23.2'
minimumRuntimeCompile "junit:junit:${props.getProperty('junit')}"
minimumRuntimeCompile localGroovy()
minimumRuntimeCompile gradleApi()
}
/*****************************************************************************
@ -123,9 +145,8 @@ if (project != rootProject) {
apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm'
// we need to apply these again to override the build plugin
targetCompatibility = "10"
sourceCompatibility = "10"
targetCompatibility = '10'
sourceCompatibility = '10'
// groovydoc succeeds, but has some weird internal exception...
groovydoc.enabled = false
@ -134,6 +155,7 @@ if (project != rootProject) {
dependencyLicenses.enabled = false
dependenciesInfo.enabled = false
forbiddenApisMain.enabled = false
forbiddenApisMinimumRuntime.enabled = false
forbiddenApisTest.enabled = false
jarHell.enabled = false
thirdPartyAudit.enabled = false

@ -23,7 +23,6 @@ import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
import groovy.transform.CompileDynamic
import groovy.transform.CompileStatic
import org.apache.commons.io.IOUtils
import org.apache.tools.ant.taskdefs.Java
import org.eclipse.jgit.lib.Constants
import org.eclipse.jgit.lib.RepositoryBuilder
import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin
@ -841,7 +840,7 @@ class BuildPlugin implements Plugin<Project> {
test.executable = "${ext.get('runtimeJavaHome')}/bin/java"
test.workingDir = project.file("${project.buildDir}/testrun/${test.name}")
test.maxParallelForks = project.rootProject.extensions.getByType(ExtraPropertiesExtension).get('defaultParallel') as Integer
test.maxParallelForks = System.getProperty('tests.jvms', project.rootProject.extensions.extraProperties.get('defaultParallel').toString()) as Integer
test.exclude '**/*$*.class'
@ -861,17 +860,25 @@ class BuildPlugin implements Plugin<Project> {
// we use './temp' since this is per JVM and tests are forbidden from writing to CWD
test.systemProperties 'gradle.dist.lib': new File(project.class.location.toURI()).parent,
'gradle.worker.jar': "${project.gradle.getGradleUserHomeDir()}/caches/${project.gradle.gradleVersion}/workerMain/gradle-worker.jar",
'gradle.user.home': project.gradle.getGradleUserHomeDir(),
'java.io.tmpdir': './temp',
'java.awt.headless': 'true',
'tests.gradle': 'true',
'tests.artifact': project.name,
'tests.task': test.path,
'tests.security.manager': 'true',
'tests.seed': project.property('testSeed'),
'jna.nosys': 'true'
// ignore changing test seed when build is passed -Dignore.tests.seed for cacheability experimentation
if (System.getProperty('ignore.tests.seed') != null) {
nonInputProperties.systemProperty('tests.seed', project.property('testSeed'))
} else {
test.systemProperty('tests.seed', project.property('testSeed'))
}
// don't track these as inputs since they contain absolute paths and break cache relocatability
nonInputProperties.systemProperty('gradle.worker.jar', "${project.gradle.getGradleUserHomeDir()}/caches/${project.gradle.gradleVersion}/workerMain/gradle-worker.jar")
nonInputProperties.systemProperty('gradle.user.home', project.gradle.getGradleUserHomeDir())
nonInputProperties.systemProperty('compiler.java', "${-> (ext.get('compilerJavaVersion') as JavaVersion).getMajorVersion()}")
// TODO: remove setting logging level via system property

@ -1,64 +0,0 @@
package org.elasticsearch.gradle
import org.gradle.api.logging.LogLevel
import org.gradle.api.logging.Logger
/**
* Writes data passed to this stream as log messages.
*
* The stream will be flushed whenever a newline is detected.
* Allows setting an optional prefix before each line of output.
*/
public class LoggingOutputStream extends OutputStream {
/** The starting length of the buffer */
static final int DEFAULT_BUFFER_LENGTH = 4096
/** The buffer of bytes sent to the stream */
byte[] buffer = new byte[DEFAULT_BUFFER_LENGTH]
/** Offset of the start of unwritten data in the buffer */
int start = 0
/** Offset of the end (semi-open) of unwritten data in the buffer */
int end = 0
/** Logger to write stream data to */
Logger logger
/** Prefix to add before each line of output */
String prefix = ""
/** Log level to write log messages to */
LogLevel level
void write(final int b) throws IOException {
if (b == 0) return;
if (b == (int)'\n' as char) {
// always flush with newlines instead of adding to the buffer
flush()
return
}
if (end == buffer.length) {
if (start != 0) {
// first try shifting the used buffer back to the beginning to make space
System.arraycopy(buffer, start, buffer, 0, end - start)
} else {
// need more space, extend the buffer
}
final int newBufferLength = buffer.length + DEFAULT_BUFFER_LENGTH;
final byte[] newBuffer = new byte[newBufferLength];
System.arraycopy(buffer, 0, newBuffer, 0, buffer.length);
buffer = newBuffer;
}
buffer[end++] = (byte) b;
}
void flush() {
if (end == start) return
logger.log(level, prefix + new String(buffer, start, end - start));
start = end
}
}

@ -21,11 +21,9 @@ package org.elasticsearch.gradle.doc
import org.elasticsearch.gradle.OS
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.test.ClusterFormationTasks
import org.elasticsearch.gradle.test.RestTestPlugin
import org.gradle.api.Project
import org.gradle.api.Task
/**
* Sets up tests for documentation.
*/
@ -38,7 +36,7 @@ public class DocsTestPlugin extends RestTestPlugin {
super.apply(project)
String distribution = System.getProperty('tests.distribution', 'default')
// The distribution can be configured with -Dtests.distribution on the command line
project.testClusters.integTest.distribution = distribution.toUpperCase()
project.testClusters.integTest.testDistribution = distribution.toUpperCase()
project.testClusters.integTest.nameCustomization = { it.replace("integTest", "node") }
// Docs are published separately so no need to assemble
project.tasks.assemble.enabled = false

@ -353,7 +353,7 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
private void testSetup(Snippet snippet) {
if (lastDocsPath == snippet.path) {
throw new InvalidUserDataException("$snippet: wasn't first")
throw new InvalidUserDataException("$snippet: wasn't first. TESTSETUP can only be used in the first snippet of a document.")
}
setupCurrent(snippet)
current.println('---')

@ -27,7 +27,9 @@ import org.elasticsearch.gradle.tool.ClasspathUtils
import org.gradle.api.JavaVersion
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.artifacts.Configuration
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.plugins.JavaPluginConvention
import org.gradle.api.plugins.quality.Checkstyle
/**
@ -45,13 +47,23 @@ class PrecommitTasks {
forbiddenApisCliJar('de.thetaphi:forbiddenapis:2.6')
}
Configuration jarHellConfig = project.configurations.create("jarHell")
if (ClasspathUtils.isElasticsearchProject() && project.path.equals(":libs:elasticsearch-core") == false) {
// External plugins will depend on this already via transitive dependencies.
// Internal projects are not all plugins, so make sure the check is available.
// We skip :libs:elasticsearch-core itself to avoid running jar hell against itself.
project.dependencies {
jarHell project.project(":libs:elasticsearch-core")
}
}
List<Task> precommitTasks = [
configureCheckstyle(project),
configureForbiddenApisCli(project),
project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class),
project.tasks.create('licenseHeaders', LicenseHeadersTask.class),
project.tasks.create('filepermissions', FilePermissionsTask.class),
configureJarHell(project),
configureJarHell(project, jarHellConfig),
configureThirdPartyAudit(project),
configureTestingConventions(project)
]
@ -108,12 +120,13 @@ class PrecommitTasks {
return task
}
private static Task configureJarHell(Project project) {
private static Task configureJarHell(Project project, Configuration jarHellConfig) {
return project.tasks.create('jarHell', JarHellTask) { task ->
task.classpath = project.sourceSets.test.runtimeClasspath
task.classpath = project.sourceSets.test.runtimeClasspath + jarHellConfig;
if (project.plugins.hasPlugin(ShadowPlugin)) {
task.classpath += project.configurations.bundle
}
task.dependsOn(jarHellConfig);
}
}

@ -58,6 +58,9 @@ public class AntFixture extends AntTask implements Fixture {
@Input
boolean useShell = false
@Input
int maxWaitInSeconds = 30
/**
* A flag to indicate whether the fixture should be run in the foreground, or spawned.
* It is protected so subclasses can override (eg RunTask).
@ -128,7 +131,7 @@ public class AntFixture extends AntTask implements Fixture {
String failedProp = "failed${name}"
// first wait for resources, or the failure marker from the wrapper script
ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: failedProp) {
ant.waitfor(maxwait: maxWaitInSeconds, maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: failedProp) {
or {
resourceexists {
file(file: failureMarker.toString())

@ -18,7 +18,7 @@
*/
package org.elasticsearch.gradle.test
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.testclusters.ElasticsearchCluster
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.gradle.api.DefaultTask
@ -26,7 +26,6 @@ import org.gradle.api.Task
import org.gradle.api.execution.TaskExecutionAdapter
import org.gradle.api.logging.Logger
import org.gradle.api.logging.Logging
import org.gradle.api.specs.Specs
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskState
@ -38,7 +37,6 @@ import org.gradle.process.CommandLineArgumentProvider
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.util.stream.Stream
/**
* A wrapper task around setting up a cluster and running rest tests.
*/
@ -60,7 +58,7 @@ class RestIntegTestTask extends DefaultTask {
Boolean includePackaged = false
RestIntegTestTask() {
runner = project.tasks.create("${name}Runner", Test.class)
runner = project.tasks.create("${name}Runner", RestTestRunnerTask.class)
super.dependsOn(runner)
clusterInit = project.tasks.create(name: "${name}Cluster#init", dependsOn: project.testClasses)
runner.dependsOn(clusterInit)
@ -70,18 +68,12 @@ class RestIntegTestTask extends DefaultTask {
} else {
project.testClusters {
"$name" {
distribution = 'INTEG_TEST'
version = VersionProperties.elasticsearch
javaHome = project.file(project.ext.runtimeJavaHome)
}
}
runner.useCluster project.testClusters."$name"
}
// disable the build cache for rest test tasks
// there are a number of inputs we aren't properly tracking here so we'll just not cache these for now
runner.getOutputs().doNotCacheIf("Caching is disabled for REST integration tests", Specs.SATISFIES_ALL)
// override/add more for rest tests
runner.maxParallelForks = 1
runner.include('**/*IT.class')

@ -40,9 +40,6 @@ public class BatsOverVagrantTask extends VagrantCommandTask {
@Override
protected OutputStream createLoggerOutputStream() {
return new TapLoggerOutputStream(
command: commandLine.join(' '),
factory: getProgressLoggerFactory(),
logger: logger)
return new TapLoggerOutputStream(logger, getProgressLoggerFactory().newOperation(boxName).setDescription(boxName));
}
}

@ -1,111 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.vagrant
import org.elasticsearch.gradle.LoggingOutputStream
import org.gradle.api.GradleScriptException
import org.gradle.api.logging.Logger
import org.gradle.internal.logging.progress.ProgressLogger
import java.util.regex.Matcher
/**
* Adapts an OutputStream containing output from bats into a ProgressLogger
* and a Logger. Every test output goes to the ProgressLogger and all failures
* and non-test output goes to the Logger. That means you can always glance
* at the result of the last test and the cumulative pass/fail/skip stats and
* the failures are all logged.
*
* There is a Tap4j project but we can't use it because it wants to parse the
* entire TAP stream at once and won't parse it stream-wise.
*/
public class TapLoggerOutputStream extends LoggingOutputStream {
private final ProgressLogger progressLogger
private boolean isStarted = false
private final Logger logger
private int testsCompleted = 0
private int testsFailed = 0
private int testsSkipped = 0
private Integer testCount
private String countsFormat
TapLoggerOutputStream(Map args) {
logger = args.logger
progressLogger = args.factory.newOperation(VagrantLoggerOutputStream)
progressLogger.setDescription("TAP output for `${args.command}`")
}
@Override
public void flush() {
if (isStarted == false) {
progressLogger.started()
isStarted = true
}
if (end == start) return
line(new String(buffer, start, end - start))
start = end
}
void line(String line) {
// System.out.print "===> $line\n"
if (testCount == null) {
try {
testCount = line.split('\\.').last().toInteger()
def length = (testCount as String).length()
countsFormat = "%0${length}d"
countsFormat = "[$countsFormat|$countsFormat|$countsFormat/$countsFormat]"
return
} catch (Exception e) {
throw new GradleScriptException(
'Error parsing first line of TAP stream!!', e)
}
}
Matcher m = line =~ /(?<status>ok|not ok) \d+(?<skip> # skip (?<skipReason>\(.+\))?)? \[(?<suite>.+)\] (?<test>.+)/
if (!m.matches()) {
/* These might be failure report lines or comments or whatever. Its hard
to tell and it doesn't matter. */
logger.warn(line)
return
}
boolean skipped = m.group('skip') != null
boolean success = !skipped && m.group('status') == 'ok'
String skipReason = m.group('skipReason')
String suiteName = m.group('suite')
String testName = m.group('test')
String status
if (skipped) {
status = "SKIPPED"
testsSkipped++
} else if (success) {
status = " OK"
testsCompleted++
} else {
status = " FAILED"
testsFailed++
}
String counts = sprintf(countsFormat,
[testsCompleted, testsFailed, testsSkipped, testCount])
progressLogger.progress("Tests $counts, $status [$suiteName] $testName")
if (!success) {
logger.warn(line)
}
}
}

@ -78,11 +78,9 @@ public class VagrantCommandTask extends LoggedExec {
}
protected OutputStream createLoggerOutputStream() {
return new VagrantLoggerOutputStream(
command: commandLine.join(' '),
factory: getProgressLoggerFactory(),
return new VagrantLoggerOutputStream(getProgressLoggerFactory().newOperation(boxName + " " + command).setDescription(boxName),
/* Vagrant tends to output a lot of stuff, but most of the important
stuff starts with ==> $box */
squashedPrefix: "==> $boxName: ")
"==> $boxName: ")
}
}

@ -1,101 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.vagrant
import org.elasticsearch.gradle.LoggingOutputStream
import org.gradle.internal.logging.progress.ProgressLogger
/**
* Adapts an OutputStream being written to by vagrant into a ProcessLogger. It
* has three hacks to make the output nice:
*
* 1. Attempt to filter out the "unimportant" output from vagrant. Usually
* vagrant prefixes its more important output with "==> $boxname: ". The stuff
* that isn't prefixed that way can just be thrown out.
*
* 2. It also attempts to detect when vagrant does tricks assuming its writing
* to a terminal emulator and renders the output more like gradle users expect.
* This means that progress indicators for things like box downloading work and
* box importing look pretty good.
*
* 3. It catches lines that look like "==> $boxName ==> Heading text" and stores
* the text after the second arrow as a "heading" for use in annotating
* provisioning. It does this because provisioning can spit out _lots_ of text
* and its very easy to lose context when there isn't a scrollback. So we've
* sprinkled `echo "==> Heading text"` into the provisioning scripts for this
* to catch so it can render the output like
* "Heading text > stdout from the provisioner".
*/
public class VagrantLoggerOutputStream extends LoggingOutputStream {
private static final String HEADING_PREFIX = '==> '
private final ProgressLogger progressLogger
private boolean isStarted = false
private String squashedPrefix
private String lastLine = ''
private boolean inProgressReport = false
private String heading = ''
VagrantLoggerOutputStream(Map args) {
progressLogger = args.factory.newOperation(VagrantLoggerOutputStream)
progressLogger.setDescription("Vagrant output for `$args.command`")
squashedPrefix = args.squashedPrefix
}
@Override
public void flush() {
if (isStarted == false) {
progressLogger.started()
isStarted = true
}
if (end == start) return
line(new String(buffer, start, end - start))
start = end
}
void line(String line) {
if (line.startsWith('\r\u001b')) {
/* We don't want to try to be a full terminal emulator but we want to
keep the escape sequences from leaking and catch _some_ of the
meaning. */
line = line.substring(2)
if ('[K' == line) {
inProgressReport = true
}
return
}
if (line.startsWith(squashedPrefix)) {
line = line.substring(squashedPrefix.length())
inProgressReport = false
lastLine = line
if (line.startsWith(HEADING_PREFIX)) {
line = line.substring(HEADING_PREFIX.length())
heading = line + ' > '
} else {
line = heading + line
}
} else if (inProgressReport) {
inProgressReport = false
line = lastLine + line
} else {
return
}
progressLogger.progress(line)
}
}

@ -312,7 +312,7 @@ class VagrantTestPlugin implements Plugin<Project> {
test_args=( "\$@" )
fi
"${-> convertPath(project, linuxGradleJdk.toString()) }"/bin/java -cp "\$PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner "\${test_args[@]}"
"${-> convertLinuxPath(project, linuxGradleJdk.toString()) }"/bin/java -cp "\$PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner "\${test_args[@]}"
"""
}
Task createWindowsRunnerScript = project.tasks.create('createWindowsRunnerScript', FileContentsTask) {
@ -321,13 +321,19 @@ class VagrantTestPlugin implements Plugin<Project> {
// the use of $args rather than param() here is deliberate because the syntax for array (multivalued) parameters is likely
// a little trappy for those unfamiliar with powershell
contents """\
if (\$args.Count -eq 0) {
\$testArgs = @("${-> project.extensions.esvagrant.testClass}")
} else {
\$testArgs = \$args
try {
if (\$args.Count -eq 0) {
\$testArgs = @("${-> project.extensions.esvagrant.testClass}")
} else {
\$testArgs = \$args
}
& "${-> convertWindowsPath(project, windowsGradleJdk.toString()) }/bin/java" -cp "\$Env:PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner @testArgs
exit \$LASTEXITCODE
} catch {
# catch if we have a failure to even run the script at all above, equivalent to set -e, sort of
echo "\$_.Exception.Message"
exit 1
}
& "${-> convertPath(project, windowsGradleJdk.toString()) }"/bin/java -cp "\$Env:PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner @testArgs
exit \$LASTEXITCODE
"""
}
@ -610,7 +616,7 @@ class VagrantTestPlugin implements Plugin<Project> {
// https://github.com/hashicorp/vagrant/blob/9c299a2a357fcf87f356bb9d56e18a037a53d138/plugins/communicators/winrm/communicator.rb#L195-L225
// https://devops-collective-inc.gitbooks.io/secrets-of-powershell-remoting/content/manuscript/accessing-remote-computers.html
javaPackagingTest.command = 'winrm'
javaPackagingTest.args = ['--elevated', '--command', 'powershell -File "$Env:PACKAGING_TESTS/run-tests.ps1"']
javaPackagingTest.args = ['--elevated', '--command', '& "$Env:PACKAGING_TESTS/run-tests.ps1"; exit $LASTEXITCODE']
}
TaskExecutionAdapter javaPackagingReproListener = createReproListener(project, javaPackagingTest.path)
@ -643,7 +649,10 @@ class VagrantTestPlugin implements Plugin<Project> {
}
// convert the given path from an elasticsearch repo path to a VM path
private String convertPath(Project project, String path) {
private String convertLinuxPath(Project project, String path) {
return "/elasticsearch/" + project.rootDir.toPath().relativize(Paths.get(path));
}
private String convertWindowsPath(Project project, String path) {
return "C:\\elasticsearch\\" + project.rootDir.toPath().relativize(Paths.get(path)).toString().replace('/', '\\');
}
}

@ -0,0 +1,27 @@
package org.elasticsearch.gradle;
import java.util.List;
public abstract class AbstractLazyPropertyCollection {
final String name;
final Object owner;
public AbstractLazyPropertyCollection(String name) {
this(name, null);
}
public AbstractLazyPropertyCollection(String name, Object owner) {
this.name = name;
this.owner = owner;
}
abstract List<? extends Object> getNormalizedCollection();
void assertNotNull(Object value, String description) {
if (value == null) {
throw new NullPointerException(name + " " + description + " was null" + (owner != null ? " when configuring " + owner : ""));
}
}
}

@ -1,80 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle;
public enum Distribution {
INTEG_TEST("elasticsearch"),
DEFAULT("elasticsearch"),
OSS("elasticsearch-oss");
private final String artifactName;
Distribution(String name) {
this.artifactName = name;
}
public String getArtifactName() {
return artifactName;
}
public String getGroup() {
if (this.equals(INTEG_TEST)) {
return "org.elasticsearch.distribution.integ-test-zip";
} else {
return "org.elasticsearch.distribution." + name().toLowerCase();
}
}
public String getFileExtension() {
if (this.equals(INTEG_TEST)) {
return "zip";
} else {
return OS.conditionalString()
.onUnix(() -> "tar.gz")
.onWindows(() -> "zip")
.supply();
}
}
public String getClassifier() {
if (this.equals(INTEG_TEST)) {
return "";
} else {
return OS.<String>conditional()
.onLinux(() -> "linux-x86_64")
.onWindows(() -> "windows-x86_64")
.onMac(() -> "darwin-x86_64")
.supply();
}
}
public String getLiveConfiguration() {
if (this.equals(INTEG_TEST)) {
return "integ-test-zip";
} else {
return (this.equals(OSS) ? "oss-" : "") + OS.<String>conditional()
.onLinux(() -> "linux-tar")
.onWindows(() -> "windows-zip")
.onMac(() -> "darwin-tar")
.supply();
}
}
}

@ -54,6 +54,7 @@ import java.util.function.Supplier;
*/
public class DistributionDownloadPlugin implements Plugin<Project> {
private static final String CONTAINER_NAME = "elasticsearch_distributions";
private static final String FAKE_IVY_GROUP = "elasticsearch-distribution";
private static final String DOWNLOAD_REPO_NAME = "elasticsearch-downloads";
@ -67,7 +68,7 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
Configuration extractedConfiguration = project.getConfigurations().create("es_distro_extracted_" + name);
return new ElasticsearchDistribution(name, project.getObjects(), fileConfiguration, extractedConfiguration);
});
project.getExtensions().add("elasticsearch_distributions", distributionsContainer);
project.getExtensions().add(CONTAINER_NAME, distributionsContainer);
setupDownloadServiceRepo(project);
@ -78,6 +79,11 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
project.afterEvaluate(this::setupDistributions);
}
@SuppressWarnings("unchecked")
public static NamedDomainObjectContainer<ElasticsearchDistribution> getContainer(Project project) {
return (NamedDomainObjectContainer<ElasticsearchDistribution>) project.getExtensions().getByName(CONTAINER_NAME);
}
// pkg private for tests
void setupDistributions(Project project) {
for (ElasticsearchDistribution distribution : distributionsContainer) {

@ -20,7 +20,9 @@
package org.elasticsearch.gradle;
import org.gradle.api.Buildable;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileTree;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.TaskDependency;
@ -28,6 +30,7 @@ import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Iterator;
import java.util.Locale;
import java.util.concurrent.Callable;
public class ElasticsearchDistribution implements Buildable {
@ -90,6 +93,10 @@ public class ElasticsearchDistribution implements Buildable {
return configuration.getBuildDependencies();
}
public FileTree getFileTree(Project project) {
return project.fileTree((Callable<File>) configuration::getSingleFile);
}
@Override
public String toString() {
return configuration.getSingleFile().toString();

@ -0,0 +1,205 @@
package org.elasticsearch.gradle;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Nested;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.function.Supplier;
import java.util.stream.Collectors;
public class LazyPropertyList<T> extends AbstractLazyPropertyCollection implements List<T> {
private final List<PropertyListEntry<T>> delegate = new ArrayList<>();
public LazyPropertyList(String name) {
super(name);
}
public LazyPropertyList(String name, Object owner) {
super(name, owner);
}
@Override
public int size() {
return delegate.size();
}
@Override
public boolean isEmpty() {
return delegate.isEmpty();
}
@Override
public boolean contains(Object o) {
return delegate.stream().anyMatch(entry -> entry.getValue().equals(o));
}
@Override
public Iterator<T> iterator() {
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).iterator();
}
@Override
public Object[] toArray() {
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).toArray();
}
@Override
public <T1> T1[] toArray(T1[] a) {
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).collect(Collectors.toList()).toArray(a);
}
@Override
public boolean add(T t) {
return delegate.add(new PropertyListEntry<>(() -> t, PropertyNormalization.DEFAULT));
}
public boolean add(Supplier<T> supplier) {
return delegate.add(new PropertyListEntry<>(supplier, PropertyNormalization.DEFAULT));
}
public boolean add(Supplier<T> supplier, PropertyNormalization normalization) {
return delegate.add(new PropertyListEntry<>(supplier, normalization));
}
@Override
public boolean remove(Object o) {
throw new UnsupportedOperationException(this.getClass().getName() + " does not support remove()");
}
@Override
public boolean containsAll(Collection<?> c) {
return delegate.stream().map(PropertyListEntry::getValue).collect(Collectors.toList()).containsAll(c);
}
@Override
public boolean addAll(Collection<? extends T> c) {
c.forEach(this::add);
return true;
}
@Override
public boolean addAll(int index, Collection<? extends T> c) {
int i = index;
for (T item : c) {
this.add(i++, item);
}
return true;
}
@Override
public boolean removeAll(Collection<?> c) {
throw new UnsupportedOperationException(this.getClass().getName() + " does not support removeAll()");
}
@Override
public boolean retainAll(Collection<?> c) {
throw new UnsupportedOperationException(this.getClass().getName() + " does not support retainAll()");
}
@Override
public void clear() {
delegate.clear();
}
@Override
public T get(int index) {
PropertyListEntry<T> entry = delegate.get(index);
validate(entry);
return entry.getValue();
}
@Override
public T set(int index, T element) {
return delegate.set(index, new PropertyListEntry<>(() -> element, PropertyNormalization.DEFAULT)).getValue();
}
@Override
public void add(int index, T element) {
delegate.add(index, new PropertyListEntry<>(() -> element, PropertyNormalization.DEFAULT));
}
@Override
public T remove(int index) {
return delegate.remove(index).getValue();
}
@Override
public int indexOf(Object o) {
for (int i = 0; i < delegate.size(); i++) {
if (delegate.get(i).getValue().equals(o)) {
return i;
}
}
return -1;
}
@Override
public int lastIndexOf(Object o) {
int lastIndex = -1;
for (int i = 0; i < delegate.size(); i++) {
if (delegate.get(i).getValue().equals(o)) {
lastIndex = i;
}
}
return lastIndex;
}
@Override
public ListIterator<T> listIterator() {
return delegate.stream().map(PropertyListEntry::getValue).collect(Collectors.toList()).listIterator();
}
@Override
public ListIterator<T> listIterator(int index) {
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).collect(Collectors.toList()).listIterator(index);
}
@Override
public List<T> subList(int fromIndex, int toIndex) {
return delegate.stream()
.peek(this::validate)
.map(PropertyListEntry::getValue)
.collect(Collectors.toList())
.subList(fromIndex, toIndex);
}
@Override
@Nested
List<? extends Object> getNormalizedCollection() {
return delegate.stream()
.peek(this::validate)
.filter(entry -> entry.getNormalization() != PropertyNormalization.IGNORE_VALUE)
.collect(Collectors.toList());
}
private void validate(PropertyListEntry<T> entry) {
assertNotNull(entry.getValue(), "entry");
}
private class PropertyListEntry<T> {
private final Supplier<T> supplier;
private final PropertyNormalization normalization;
PropertyListEntry(Supplier<T> supplier, PropertyNormalization normalization) {
this.supplier = supplier;
this.normalization = normalization;
}
public PropertyNormalization getNormalization() {
return normalization;
}
@Input
public T getValue() {
assertNotNull(supplier, "supplier");
return supplier.get();
}
}
}
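Entries added through the Supplier overloads are captured, not evaluated, at configuration time; the value is resolved and null-checked only when the list is read. A minimal sketch of that behavior (illustrative names, using the class above):

import java.util.concurrent.atomic.AtomicInteger;

class LazyPropertyListExample {
    public static void main(String[] args) {
        LazyPropertyList<CharSequence> jvmArgs = new LazyPropertyList<>("JVM arguments");
        AtomicInteger heapMb = new AtomicInteger(512);

        // The supplier is stored as-is; nothing is evaluated yet...
        jvmArgs.add(() -> "-Xmx" + heapMb.get() + "m");
        heapMb.set(1024);

        // ...so reads observe the latest configured value.
        System.out.println(jvmArgs.get(0)); // prints -Xmx1024m
    }
}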

@ -0,0 +1,167 @@
package org.elasticsearch.gradle;
import org.gradle.api.Named;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Nested;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Supplier;
import java.util.stream.Collectors;
public class LazyPropertyMap<K, V> extends AbstractLazyPropertyCollection implements Map<K, V> {
private final Map<K, PropertyMapEntry<K, V>> delegate = new LinkedHashMap<>();
private final BiFunction<K, V, ?> normalizationMapper;
public LazyPropertyMap(String name) {
this(name, null);
}
public LazyPropertyMap(String name, Object owner) {
this(name, owner, null);
}
public LazyPropertyMap(String name, Object owner, BiFunction<K, V, ?> normalizationMapper) {
super(name, owner);
this.normalizationMapper = normalizationMapper;
}
@Override
public int size() {
return delegate.size();
}
@Override
public boolean isEmpty() {
return delegate.isEmpty();
}
@Override
public boolean containsKey(Object key) {
return delegate.containsKey(key);
}
@Override
public boolean containsValue(Object value) {
return delegate.values().stream().map(PropertyMapEntry::getValue).anyMatch(v -> v.equals(value));
}
@Override
public V get(Object key) {
PropertyMapEntry<K, V> entry = delegate.get(key);
if (entry != null) {
V value = entry.getValue();
assertNotNull(value, "value for key '" + key + "'");
return value;
} else {
return null;
}
}
@Override
public V put(K key, V value) {
return put(key, value, PropertyNormalization.DEFAULT);
}
public V put(K key, V value, PropertyNormalization normalization) {
assertNotNull(value, "value for key '" + key + "'");
return put(key, () -> value, normalization);
}
public V put(K key, Supplier<V> supplier) {
return put(key, supplier, PropertyNormalization.DEFAULT);
}
public V put(K key, Supplier<V> supplier, PropertyNormalization normalization) {
assertNotNull(supplier, "supplier for key '" + key + "'");
PropertyMapEntry<K, V> previous = delegate.put(key, new PropertyMapEntry<>(key, supplier, normalization));
return previous == null ? null : previous.getValue();
}
@Override
public V remove(Object key) {
PropertyMapEntry<K, V> previous = delegate.remove(key);
return previous == null ? null : previous.getValue();
}
@Override
public void putAll(Map<? extends K, ? extends V> m) {
throw new UnsupportedOperationException(this.getClass().getName() + " does not support putAll()");
}
@Override
public void clear() {
delegate.clear();
}
@Override
public Set<K> keySet() {
return delegate.keySet();
}
@Override
public Collection<V> values() {
return delegate.values().stream().peek(this::validate).map(PropertyMapEntry::getValue).collect(Collectors.toList());
}
@Override
public Set<Entry<K, V>> entrySet() {
return delegate.entrySet().stream()
.peek(this::validate)
.collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getValue())).entrySet();
}
@Override
@Nested
List<? extends Object> getNormalizedCollection() {
return delegate.values().stream()
.peek(this::validate)
.filter(entry -> entry.getNormalization() != PropertyNormalization.IGNORE_VALUE)
.map(entry -> normalizationMapper == null ? entry : normalizationMapper.apply(entry.getKey(), entry.getValue()))
.collect(Collectors.toList());
}
private void validate(Map.Entry<K, PropertyMapEntry<K, V>> entry) {
validate(entry.getValue());
}
private void validate(PropertyMapEntry<K, V> supplier) {
assertNotNull(supplier, "key '" + supplier.getKey() + "' supplier value");
}
private static class PropertyMapEntry<K, V> implements Named {
private final K key;
private final Supplier<V> value;
private final PropertyNormalization normalization;
PropertyMapEntry(K key, Supplier<V> value, PropertyNormalization normalization) {
this.key = key;
this.value = value;
this.normalization = normalization;
}
public PropertyNormalization getNormalization() {
return normalization;
}
@Override
public String getName() {
return getKey().toString();
}
@Input
public K getKey() {
return key;
}
@Input
public V getValue() {
return value.get();
}
}
}
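Combined with the PropertyNormalization enum added in this commit, the map lets machine-specific values be used at runtime while staying out of Gradle's input snapshot. A sketch assuming the put overloads above (the setting names are only examples):

import java.nio.file.Path;
import java.nio.file.Paths;

class LazyPropertyMapExample {
    public static void main(String[] args) {
        LazyPropertyMap<String, CharSequence> settings = new LazyPropertyMap<>("Settings");
        Path workingDir = Paths.get("build/testclusters/node-0");

        // Tracked value: changing it invalidates cached task outputs.
        settings.put("cluster.name", "test-cluster");

        // Absolute paths differ per machine, so supply the value lazily and
        // exclude it from input snapshotting with IGNORE_VALUE.
        settings.put("path.repo",
                () -> workingDir.toAbsolutePath().resolve("repo").toString(),
                PropertyNormalization.IGNORE_VALUE);

        System.out.println(settings.get("path.repo"));
    }
}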

@ -0,0 +1,78 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
/**
* Writes data passed to this stream as log messages.
*
* The stream will be flushed whenever a newline is detected.
* Subclasses decide how each completed line is logged via {@code logLine}.
*/
public abstract class LoggingOutputStream extends OutputStream {
/** The starting length of the buffer */
private static final int DEFAULT_BUFFER_LENGTH = 4096;
/** The buffer of bytes sent to the stream */
private byte[] buffer = new byte[DEFAULT_BUFFER_LENGTH];
/** Offset of the start of unwritten data in the buffer */
private int start = 0;
/** Offset of the end (semi-open) of unwritten data in the buffer */
private int end = 0;
@Override
public void write(final int b) throws IOException {
if (b == 0) return;
if (b == '\n') {
// always flush with newlines instead of adding to the buffer
flush();
return;
}
if (end == buffer.length) {
if (start != 0) {
// first try shifting the used buffer back to the beginning to make space
int len = end - start;
System.arraycopy(buffer, start, buffer, 0, len);
start = 0;
end = len;
} else {
// otherwise extend the buffer
buffer = Arrays.copyOf(buffer, buffer.length + DEFAULT_BUFFER_LENGTH);
}
}
buffer[end++] = (byte) b;
}
@Override
public void flush() {
if (end == start) return;
logLine(new String(buffer, start, end - start));
start = end;
}
protected abstract void logLine(String line);
}
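A minimal concrete subclass (hypothetical, for illustration only) shows the contract: write() buffers bytes and hands each newline-terminated chunk to logLine() without the trailing newline:

import java.io.IOException;

class StdoutLoggingOutputStream extends LoggingOutputStream {
    private final String prefix;

    StdoutLoggingOutputStream(String prefix) {
        this.prefix = prefix;
    }

    @Override
    protected void logLine(String line) {
        // Called once per completed line.
        System.out.println(prefix + line);
    }

    public static void main(String[] args) throws IOException {
        StdoutLoggingOutputStream out = new StdoutLoggingOutputStream("[node-0] ");
        out.write("starting\nstarted\n".getBytes());
        // prints:
        // [node-0] starting
        // [node-0] started
    }
}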

@ -0,0 +1,13 @@
package org.elasticsearch.gradle;
public enum PropertyNormalization {
/**
* Uses default strategy based on runtime property type.
*/
DEFAULT,
/**
* Ignores property value completely for the purposes of input snapshotting.
*/
IGNORE_VALUE
}

@ -18,8 +18,10 @@
*/
package org.elasticsearch.gradle.precommit;
import de.thetaphi.forbiddenapis.cli.CliMain;
import org.apache.commons.io.output.NullOutputStream;
import org.elasticsearch.gradle.JdkJarHellCheck;
import org.elasticsearch.gradle.OS;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.JavaVersion;
@ -51,6 +53,7 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
@ -69,6 +72,12 @@ public class ThirdPartyAuditTask extends DefaultTask {
private static final Pattern VIOLATION_PATTERN = Pattern.compile(
"\\s\\sin ([a-zA-Z0-9$.]+) \\(.*\\)"
);
private static final int SIG_KILL_EXIT_VALUE = 137;
private static final List<Integer> EXPECTED_EXIT_CODES = Arrays.asList(
CliMain.EXIT_SUCCESS,
CliMain.EXIT_VIOLATION,
CliMain.EXIT_UNSUPPORTED_JDK
);
private Set<String> missingClassExcludes = new TreeSet<>();
@ -327,7 +336,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
private String runForbiddenAPIsCli() throws IOException {
ByteArrayOutputStream errorOut = new ByteArrayOutputStream();
getProject().javaexec(spec -> {
ExecResult result = getProject().javaexec(spec -> {
if (javaHome != null) {
spec.setExecutable(javaHome + "/bin/java");
}
@ -336,6 +345,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
getRuntimeConfiguration(),
getProject().getConfigurations().getByName("compileOnly")
);
spec.jvmArgs("-Xmx1g");
spec.setMain("de.thetaphi.forbiddenapis.cli.CliMain");
spec.args(
"-f", getSignatureFile().getAbsolutePath(),
@ -348,10 +358,18 @@ public class ThirdPartyAuditTask extends DefaultTask {
}
spec.setIgnoreExitValue(true);
});
if (OS.current().equals(OS.LINUX) && result.getExitValue() == SIG_KILL_EXIT_VALUE) {
throw new IllegalStateException(
"Third party audit was killed buy SIGKILL, could be a victim of the Linux OOM killer"
);
}
final String forbiddenApisOutput;
try (ByteArrayOutputStream outputStream = errorOut) {
forbiddenApisOutput = outputStream.toString(StandardCharsets.UTF_8.name());
}
if (EXPECTED_EXIT_CODES.contains(result.getExitValue()) == false) {
throw new IllegalStateException("Forbidden APIs cli failed: " + forbiddenApisOutput);
}
return forbiddenApisOutput;
}

@ -0,0 +1,37 @@
package org.elasticsearch.gradle.test;
import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.Nested;
import org.gradle.api.tasks.testing.Test;
import java.util.ArrayList;
import java.util.Collection;
import static org.elasticsearch.gradle.testclusters.TestDistribution.INTEG_TEST;
/**
* Customized version of Gradle {@link Test} task which tracks a collection of {@link ElasticsearchCluster} as a task input. We must do this
* as a custom task type because the current {@link org.gradle.api.tasks.TaskInputs} runtime API does not have a way to register
* {@link Nested} inputs.
*/
@CacheableTask
public class RestTestRunnerTask extends Test {
private Collection<ElasticsearchCluster> clusters = new ArrayList<>();
public RestTestRunnerTask() {
super();
this.getOutputs().doNotCacheIf("Build cache is only enabled for tests against clusters using the 'integ-test' distribution",
task -> clusters.stream().flatMap(c -> c.getNodes().stream()).anyMatch(n -> n.getTestDistribution() != INTEG_TEST));
}
@Nested
public Collection<ElasticsearchCluster> getClusters() {
return clusters;
}
public void testCluster(ElasticsearchCluster cluster) {
this.clusters.add(cluster);
}
}
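A sketch of wiring the task to a cluster (hypothetical plugin code; the "testClusters" extension lookup is an assumption based on the surrounding plugins):

import org.elasticsearch.gradle.test.RestTestRunnerTask;
import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;

class RestTestWiringExample implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        project.getTasks().register("integTestRunner", RestTestRunnerTask.class, runner -> {
            @SuppressWarnings("unchecked")
            NamedDomainObjectContainer<ElasticsearchCluster> clusters =
                    (NamedDomainObjectContainer<ElasticsearchCluster>)
                            project.getExtensions().getByName("testClusters");
            // Registering the cluster makes its configuration a @Nested task input.
            runner.testCluster(clusters.getByName("integTest"));
            runner.include("**/*IT.class");
        });
    }
}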

@ -18,15 +18,16 @@
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.GradleServicesAdapter;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.PropertyNormalization;
import org.elasticsearch.gradle.http.WaitForHttpResource;
import org.gradle.api.Named;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Nested;
import java.io.File;
import java.io.IOException;
@ -41,13 +42,12 @@ import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;
public class ElasticsearchCluster implements TestClusterConfiguration {
public class ElasticsearchCluster implements TestClusterConfiguration, Named {
private static final Logger LOGGER = Logging.getLogger(ElasticsearchNode.class);
private static final int CLUSTER_UP_TIMEOUT = 40;
@ -58,22 +58,23 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
private final String clusterName;
private final NamedDomainObjectContainer<ElasticsearchNode> nodes;
private final File workingDirBase;
private final File artifactsExtractDir;
private final Function<Integer, ElasticsearchDistribution> distributionFactory;
private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
private final GradleServicesAdapter services;
private final Project project;
public ElasticsearchCluster(String path, String clusterName, Project project, File artifactsExtractDir, File workingDirBase) {
public ElasticsearchCluster(String path, String clusterName, Project project,
Function<Integer, ElasticsearchDistribution> distributionFactory, File workingDirBase) {
this.path = path;
this.clusterName = clusterName;
this.project = project;
this.distributionFactory = distributionFactory;
this.workingDirBase = workingDirBase;
this.artifactsExtractDir = artifactsExtractDir;
this.services = GradleServicesAdapter.getInstance(project);
this.nodes = project.container(ElasticsearchNode.class);
this.nodes.add(
new ElasticsearchNode(
path, clusterName + "-0",
services, artifactsExtractDir, workingDirBase
)
project, workingDirBase, distributionFactory.apply(0)
)
);
// configure the cluster name eagerly so nodes know about it
this.nodes.all((node) -> node.defaultConfig.put("cluster.name", safeName(clusterName)));
@ -96,8 +97,8 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
for (int i = nodes.size() ; i < numberOfNodes; i++) {
this.nodes.add(new ElasticsearchNode(
path, clusterName + "-" + i, services, artifactsExtractDir, workingDirBase
));
path, clusterName + "-" + i, project, workingDirBase, distributionFactory.apply(i)
));
}
}
@ -119,8 +120,8 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
}
@Override
public void setDistribution(Distribution distribution) {
nodes.all(each -> each.setDistribution(distribution));
public void setTestDistribution(TestDistribution distribution) {
nodes.all(each -> each.setTestDistribution(distribution));
}
@Override
@ -153,6 +154,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
nodes.all(each -> each.keystore(key, value));
}
@Override
public void keystore(String key, File value, PropertyNormalization normalization) {
nodes.all(each -> each.keystore(key, value, normalization));
}
@Override
public void keystore(String key, FileSupplier valueSupplier) {
nodes.all(each -> each.keystore(key, valueSupplier));
@ -163,11 +169,21 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
nodes.all(each -> each.setting(key, value));
}
@Override
public void setting(String key, String value, PropertyNormalization normalization) {
nodes.all(each -> each.setting(key, value, normalization));
}
@Override
public void setting(String key, Supplier<CharSequence> valueSupplier) {
nodes.all(each -> each.setting(key, valueSupplier));
}
@Override
public void setting(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
nodes.all(each -> each.setting(key, valueSupplier, normalization));
}
@Override
public void systemProperty(String key, String value) {
nodes.all(each -> each.systemProperty(key, value));
@ -178,6 +194,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
nodes.all(each -> each.systemProperty(key, valueSupplier));
}
@Override
public void systemProperty(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
nodes.all(each -> each.systemProperty(key, valueSupplier, normalization));
}
@Override
public void environment(String key, String value) {
nodes.all(each -> each.environment(key, value));
@ -189,13 +210,13 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
}
@Override
public void jvmArgs(String... values) {
nodes.all(each -> each.jvmArgs(values));
public void environment(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
nodes.all(each -> each.environment(key, valueSupplier, normalization));
}
@Override
public void jvmArgs(Supplier<String[]> valueSupplier) {
nodes.all(each -> each.jvmArgs(valueSupplier));
public void jvmArgs(String... values) {
nodes.all(each -> each.jvmArgs(values));
}
@Override
@ -226,7 +247,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
for (ElasticsearchNode node : nodes) {
if (nodeNames != null) {
// Can only configure master nodes if we have node names defined
if (Version.fromString(node.getVersion()).getMajor() >= 7) {
if (node.getVersion().getMajor() >= 7) {
node.defaultConfig.put("cluster.initial_master_nodes", "[" + nodeNames + "]");
node.defaultConfig.put("discovery.seed_providers", "file");
node.defaultConfig.put("discovery.seed_hosts", "[]");
@ -246,6 +267,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
nodes.all(node -> node.extraConfigFile(destination, from));
}
@Override
public void extraConfigFile(String destination, File from, PropertyNormalization normalization) {
nodes.all(node -> node.extraConfigFile(destination, from, normalization));
}
@Override
public void user(Map<String, String> userSpec) {
nodes.all(node -> node.user(userSpec));
@ -311,12 +337,6 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
return nodes.stream().noneMatch(node -> node.isProcessAlive() == false);
}
void eachVersionedDistribution(BiConsumer<String, Distribution> consumer) {
nodes.forEach(each -> {
consumer.accept(each.getVersion(), each.getDistribution());
});
}
public ElasticsearchNode singleNode() {
if (nodes.size() != 1) {
throw new IllegalStateException(
@ -356,6 +376,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
});
}
@Nested
public NamedDomainObjectContainer<ElasticsearchNode> getNodes() {
return nodes;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;

@ -18,34 +18,54 @@
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.GradleServicesAdapter;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.LazyPropertyList;
import org.elasticsearch.gradle.LazyPropertyMap;
import org.elasticsearch.gradle.LoggedExec;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.PropertyNormalization;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.http.WaitForHttpResource;
import org.gradle.api.Action;
import org.gradle.api.Named;
import org.gradle.api.Project;
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Classpath;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Nested;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.util.PatternFilterable;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.LineNumberReader;
import java.io.UncheckedIOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -67,6 +87,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private static final Logger LOGGER = Logging.getLogger(ElasticsearchNode.class);
private static final int ES_DESTROY_TIMEOUT = 20;
private static final TimeUnit ES_DESTROY_TIMEOUT_UNIT = TimeUnit.SECONDS;
private static final int NODE_UP_TIMEOUT = 2;
private static final TimeUnit NODE_UP_TIMEOUT_UNIT = TimeUnit.MINUTES;
private static final int ADDITIONAL_CONFIG_TIMEOUT = 15;
@ -74,28 +95,34 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private static final List<String> OVERRIDABLE_SETTINGS = Arrays.asList(
"path.repo",
"discovery.seed_providers"
);
private static final int TAIL_LOG_MESSAGES_COUNT = 40;
private static final List<String> MESSAGES_WE_DONT_CARE_ABOUT = Arrays.asList(
"Option UseConcMarkSweepGC was deprecated",
"is a pre-release version of Elasticsearch",
"max virtual memory areas vm.max_map_count"
);
private final String path;
private final String name;
private final GradleServicesAdapter services;
private final Project project;
private final AtomicBoolean configurationFrozen = new AtomicBoolean(false);
private final Path artifactsExtractDir;
private final Path workingDir;
private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
private final List<URI> plugins = new ArrayList<>();
private final List<File> modules = new ArrayList<>();
private final Map<String, Supplier<CharSequence>> settings = new LinkedHashMap<>();
private final Map<String, Supplier<CharSequence>> keystoreSettings = new LinkedHashMap<>();
private final Map<String, FileSupplier> keystoreFiles = new LinkedHashMap<>();
private final Map<String, Supplier<CharSequence>> systemProperties = new LinkedHashMap<>();
private final Map<String, Supplier<CharSequence>> environment = new LinkedHashMap<>();
private final List<Supplier<List<CharSequence>>> jvmArgs = new ArrayList<>();
private final Map<String, File> extraConfigFiles = new HashMap<>();
final LinkedHashMap<String, String> defaultConfig = new LinkedHashMap<>();
private final LazyPropertyMap<String, CharSequence> settings = new LazyPropertyMap<>("Settings", this);
private final LazyPropertyMap<String, CharSequence> keystoreSettings = new LazyPropertyMap<>("Keystore", this);
private final LazyPropertyMap<String, File> keystoreFiles = new LazyPropertyMap<>("Keystore files", this, FileEntry::new);
private final LazyPropertyMap<String, CharSequence> systemProperties = new LazyPropertyMap<>("System properties", this);
private final LazyPropertyMap<String, CharSequence> environment = new LazyPropertyMap<>("Environment", this);
private final LazyPropertyList<CharSequence> jvmArgs = new LazyPropertyList<>("JVM arguments", this);
private final LazyPropertyMap<String, File> extraConfigFiles = new LazyPropertyMap<>("Extra config files", this, FileEntry::new);
private final List<Map<String, String>> credentials = new ArrayList<>();
final LinkedHashMap<String, String> defaultConfig = new LinkedHashMap<>();
private final Path confPathRepo;
private final Path configFile;
@ -107,19 +134,21 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private final Path esStderrFile;
private final Path tmpDir;
private Distribution distribution;
private String version;
private TestDistribution testDistribution;
private ElasticsearchDistribution distribution;
private File javaHome;
private volatile Process esProcess;
private Function<String, String> nameCustomization = Function.identity();
private boolean isWorkingDirConfigured = false;
ElasticsearchNode(String path, String name, GradleServicesAdapter services, File artifactsExtractDir, File workingDirBase) {
ElasticsearchNode(String path, String name, Project project, File workingDirBase,
ElasticsearchDistribution distribution) {
this.path = path;
this.name = name;
this.services = services;
this.artifactsExtractDir = artifactsExtractDir.toPath();
this.project = project;
this.workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath();
this.distribution = distribution;
confPathRepo = workingDir.resolve("repo");
configFile = workingDir.resolve("config/elasticsearch.yml");
confPathData = workingDir.resolve("data");
@ -130,14 +159,18 @@ public class ElasticsearchNode implements TestClusterConfiguration {
esStderrFile = confPathLogs.resolve("es.stderr.log");
tmpDir = workingDir.resolve("tmp");
waitConditions.put("ports files", this::checkPortsFilesExistWithDelay);
setTestDistribution(TestDistribution.INTEG_TEST);
setVersion(VersionProperties.getElasticsearch());
}
public String getName() {
return nameCustomization.apply(name);
}
public String getVersion() {
return version;
@Internal
public Version getVersion() {
return distribution.getVersion();
}
@Override
@ -145,17 +178,35 @@ public class ElasticsearchNode implements TestClusterConfiguration {
requireNonNull(version, "null version passed when configuring test cluster `" + this + "`");
checkFrozen();
this.version = version;
this.distribution.setVersion(version);
}
public Distribution getDistribution() {
@Internal
public TestDistribution getTestDistribution() {
return testDistribution;
}
// package private just so the test clusters plugin can access it to wire up task dependencies
@Internal
ElasticsearchDistribution getDistribution() {
return distribution;
}
@Override
public void setDistribution(Distribution distribution) {
requireNonNull(distribution, "null distribution passed when configuring test cluster `" + this + "`");
public void setTestDistribution(TestDistribution testDistribution) {
requireNonNull(testDistribution, "null distribution passed when configuring test cluster `" + this + "`");
checkFrozen();
this.distribution = distribution;
this.testDistribution = testDistribution;
if (testDistribution == TestDistribution.INTEG_TEST) {
this.distribution.setType(ElasticsearchDistribution.Type.INTEG_TEST_ZIP);
} else {
this.distribution.setType(ElasticsearchDistribution.Type.ARCHIVE);
if (testDistribution == TestDistribution.DEFAULT) {
this.distribution.setFlavor(ElasticsearchDistribution.Flavor.DEFAULT);
} else {
this.distribution.setFlavor(ElasticsearchDistribution.Flavor.OSS);
}
}
}
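Seen from the caller's side, the mapping above means a build only ever names one of the three TestDistribution values. A minimal sketch of that, with a hypothetical helper class (only setTestDistribution itself comes from the code above):

import org.elasticsearch.gradle.testclusters.ElasticsearchNode;
import org.elasticsearch.gradle.testclusters.TestDistribution;

// Hypothetical helper: INTEG_TEST selects the integ-test-zip distribution type,
// while DEFAULT and OSS both select the archive type with the matching flavor.
class DistributionChoice {
    static void useOss(ElasticsearchNode node) {
        node.setTestDistribution(TestDistribution.OSS); // archive + OSS flavor
    }
}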
@Override
@ -177,88 +228,81 @@ public class ElasticsearchNode implements TestClusterConfiguration {
@Override
public void keystore(String key, String value) {
addSupplier("Keystore", keystoreSettings, key, value);
keystoreSettings.put(key, value);
}
@Override
public void keystore(String key, Supplier<CharSequence> valueSupplier) {
addSupplier("Keystore", keystoreSettings, key, valueSupplier);
keystoreSettings.put(key, valueSupplier);
}
@Override
public void keystore(String key, File value) {
requireNonNull(value, "keystore value was null when configuring test cluster`" + this + "`");
keystore(key, () -> value);
keystoreFiles.put(key, value);
}
@Override
public void keystore(String key, File value, PropertyNormalization normalization) {
keystoreFiles.put(key, value, normalization);
}
@Override
public void keystore(String key, FileSupplier valueSupplier) {
requireNonNull(key, "Keystore" + " key was null when configuring test cluster `" + this + "`");
requireNonNull(valueSupplier, "Keystore" + " value supplier was null when configuring test cluster `" + this + "`");
keystoreFiles.put(key, valueSupplier);
}
@Override
public void setting(String key, String value) {
addSupplier("Settings", settings, key, value);
settings.put(key, value);
}
@Override
public void setting(String key, String value, PropertyNormalization normalization) {
settings.put(key, value, normalization);
}
@Override
public void setting(String key, Supplier<CharSequence> valueSupplier) {
addSupplier("Setting", settings, key, valueSupplier);
settings.put(key, valueSupplier);
}
@Override
public void setting(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
settings.put(key, valueSupplier, normalization);
}
@Override
public void systemProperty(String key, String value) {
addSupplier("Java System property", systemProperties, key, value);
systemProperties.put(key, value);
}
@Override
public void systemProperty(String key, Supplier<CharSequence> valueSupplier) {
addSupplier("Java System property", systemProperties, key, valueSupplier);
systemProperties.put(key, valueSupplier);
}
@Override
public void systemProperty(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
systemProperties.put(key, valueSupplier, normalization);
}
@Override
public void environment(String key, String value) {
addSupplier("Environment variable", environment, key, value);
environment.put(key, value);
}
@Override
public void environment(String key, Supplier<CharSequence> valueSupplier) {
addSupplier("Environment variable", environment, key, valueSupplier);
environment.put(key, valueSupplier);
}
@Override
public void environment(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
environment.put(key, valueSupplier, normalization);
}
public void jvmArgs(String... values) {
for (String value : values) {
requireNonNull(value, "jvm argument was null when configuring test cluster `" + this + "`");
}
jvmArgs.add(() -> Arrays.asList(values));
}
public void jvmArgs(Supplier<String[]> valueSupplier) {
requireNonNull(valueSupplier, "jvm argument supplier was null when configuring test cluster `" + this + "`");
jvmArgs.add(() -> Arrays.asList(valueSupplier.get()));
}
private void addSupplier(String name, Map<String, Supplier<CharSequence>> collector, String key, Supplier<CharSequence> valueSupplier) {
requireNonNull(key, name + " key was null when configuring test cluster `" + this + "`");
requireNonNull(valueSupplier, name + " value supplier was null when configuring test cluster `" + this + "`");
collector.put(key, valueSupplier);
}
private void addSupplier(String name, Map<String, Supplier<CharSequence>> collector, String key, String actualValue) {
requireNonNull(actualValue, name + " value was null when configuring test cluster `" + this + "`");
addSupplier(name, collector, key, () -> actualValue);
}
private void checkSuppliers(String name, Collection<Supplier<CharSequence>> collector) {
collector.forEach(suplier ->
requireNonNull(
suplier.get().toString(),
name + " supplied value was null when configuring test cluster `" + this + "`"
)
);
jvmArgs.addAll(Arrays.asList(values));
}
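A small illustrative sketch of the split these methods enforce (class name and values are made up): plain JVM flags go through jvmArgs, while -D style properties must use systemProperty, since the environment setup further down rejects -D arguments passed as jvmArgs.

import org.elasticsearch.gradle.testclusters.ElasticsearchNode;

class NodeJvmOptions {
    static void configure(ElasticsearchNode node) {
        node.jvmArgs("-Xmx512m", "-XX:+UseG1GC");          // plain JVM flags
        node.systemProperty("tests.example.flag", "true"); // -D values go here instead
        // node.jvmArgs("-Dtests.example.flag=true");      // would throw TestClustersException
    }
}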
public Path getConfigDir() {
@ -268,7 +312,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
@Override
public void freeze() {
requireNonNull(distribution, "null distribution passed when configuring test cluster `" + this + "`");
requireNonNull(version, "null version passed when configuring test cluster `" + this + "`");
requireNonNull(getVersion(), "null version passed when configuring test cluster `" + this + "`");
requireNonNull(javaHome, "null javaHome passed when configuring test cluster `" + this + "`");
LOGGER.info("Locking configuration of `{}`", this);
configurationFrozen.set(true);
@ -301,30 +345,26 @@ public class ElasticsearchNode implements TestClusterConfiguration {
public synchronized void start() {
LOGGER.info("Starting `{}`", this);
Path distroArtifact = artifactsExtractDir
.resolve(distribution.getGroup())
.resolve("elasticsearch-" + getVersion());
if (Files.exists(distroArtifact) == false) {
throw new TestClustersException("Can not start " + this + ", missing: " + distroArtifact);
if (Files.exists(getExtractedDistributionDir()) == false) {
throw new TestClustersException("Can not start " + this + ", missing: " + getExtractedDistributionDir());
}
if (Files.isDirectory(distroArtifact) == false) {
throw new TestClustersException("Can not start " + this + ", is not a directory: " + distroArtifact);
if (Files.isDirectory(getExtractedDistributionDir()) == false) {
throw new TestClustersException("Can not start " + this + ", is not a directory: " + getExtractedDistributionDir());
}
try {
if (isWorkingDirConfigured == false) {
logToProcessStdout("Configuring working directory: " + workingDir);
// Only configure working dir once so we don't loose data on restarts
// Only configure working dir once so we don't lose data on restarts
isWorkingDirConfigured = true;
createWorkingDir(distroArtifact);
createWorkingDir(getExtractedDistributionDir());
}
} catch (IOException e) {
throw new UncheckedIOException("Failed to create working directory for " + this, e);
}
createConfiguration();
if(plugins.isEmpty() == false) {
if (plugins.isEmpty() == false) {
logToProcessStdout("Installing " + plugins.size() + " plugins");
plugins.forEach(plugin -> runElaticsearchBinScript(
"elasticsearch-plugin",
@ -336,13 +376,12 @@ public class ElasticsearchNode implements TestClusterConfiguration {
logToProcessStdout("Adding " + keystoreSettings.size() + " keystore settings and " + keystoreFiles.size() + " keystore files");
runElaticsearchBinScript("elasticsearch-keystore", "create");
checkSuppliers("Keystore", keystoreSettings.values());
keystoreSettings.forEach((key, value) ->
runElaticsearchBinScriptWithInput(value.get().toString(), "elasticsearch-keystore", "add", "-x", key)
runElaticsearchBinScriptWithInput(value.toString(), "elasticsearch-keystore", "add", "-x", key)
);
for (Map.Entry<String, FileSupplier> entry : keystoreFiles.entrySet()) {
File file = entry.getValue().get();
for (Map.Entry<String, File> entry : keystoreFiles.entrySet()) {
File file = entry.getValue();
requireNonNull(file, "supplied keystoreFile was null when configuring " + this);
if (file.exists() == false) {
throw new TestClustersException("supplied keystore file " + file + " does not exist, require for " + this);
@ -362,9 +401,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
credentials.forEach(paramMap -> runElaticsearchBinScript(
"elasticsearch-users",
paramMap.entrySet().stream()
.flatMap(entry -> Stream.of(entry.getKey(), entry.getValue()))
.toArray(String[]::new)
));
}
@ -402,7 +441,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private boolean isSettingMissingOrTrue(String name) {
return Boolean.valueOf(settings.getOrDefault(name, () -> "false").get().toString());
return Boolean.valueOf(settings.getOrDefault(name, "false").toString());
}
private void copyExtraConfigFiles() {
@ -410,32 +449,33 @@ public class ElasticsearchNode implements TestClusterConfiguration {
logToProcessStdout("Setting up " + extraConfigFiles.size() + " additional config files");
}
extraConfigFiles.forEach((destination, from) -> {
if (Files.exists(from.toPath()) == false) {
throw new TestClustersException("Can't create extra config file from " + from + " for " + this +
" as it does not exist");
}
Path dst = configFile.getParent().resolve(destination);
try {
Files.createDirectories(dst.getParent());
Files.copy(from.toPath(), dst, StandardCopyOption.REPLACE_EXISTING);
LOGGER.info("Added extra config file {} for {}", destination, this);
} catch (IOException e) {
throw new UncheckedIOException("Can't create extra config file for", e);
}
});
}
private void installModules() {
if (distribution == Distribution.INTEG_TEST) {
if (testDistribution == TestDistribution.INTEG_TEST) {
logToProcessStdout("Installing " + modules.size() + "modules");
for (File module : modules) {
Path destination = workingDir.resolve("modules").resolve(module.getName().replace(".zip", "").replace("-" + version, ""));
Path destination = workingDir.resolve("modules").resolve(module.getName().replace(".zip", "")
.replace("-" + version, ""));
// only install modules that are not already bundled with the integ-test distribution
if (Files.exists(destination) == false) {
services.copy(spec -> {
project.copy(spec -> {
if (module.getName().toLowerCase().endsWith(".zip")) {
spec.from(services.zipTree(module));
spec.from(project.zipTree(module));
} else if (module.isDirectory()) {
spec.from(module);
} else {
@ -460,6 +500,15 @@ public class ElasticsearchNode implements TestClusterConfiguration {
extraConfigFiles.put(destination, from);
}
@Override
public void extraConfigFile(String destination, File from, PropertyNormalization normalization) {
if (destination.contains("..")) {
throw new IllegalArgumentException("extra config file destination can't be relative, was " + destination +
" for " + this);
}
extraConfigFiles.put(destination, from, normalization);
}
@Override
public void user(Map<String, String> userSpec) {
Set<String> keys = new HashSet<>(userSpec.keySet());
@ -469,9 +518,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (keys.isEmpty() == false) {
throw new TestClustersException("Unknown keys in user definition " + keys + " for " + this);
}
Map<String,String> cred = new LinkedHashMap<>();
cred.put("useradd", userSpec.getOrDefault("username","test_user"));
cred.put("-p", userSpec.getOrDefault("password","x-pack-test-password"));
Map<String, String> cred = new LinkedHashMap<>();
cred.put("useradd", userSpec.getOrDefault("username", "test_user"));
cred.put("-p", userSpec.getOrDefault("password", "x-pack-test-password"));
cred.put("-r", userSpec.getOrDefault("role", "superuser"));
credentials.add(cred);
}
@ -485,7 +534,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
"Is this the distribution you expect it to be ?");
}
try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
services.loggedExec(spec -> {
LoggedExec.exec(project, spec -> {
spec.setEnvironment(getESEnvironment());
spec.workingDir(workingDir);
spec.executable(
@ -526,19 +575,14 @@ public class ElasticsearchNode implements TestClusterConfiguration {
defaultEnv.put("ES_PATH_CONF", configFile.getParent().toString());
String systemPropertiesString = "";
if (systemProperties.isEmpty() == false) {
checkSuppliers("Java System property", systemProperties.values());
systemPropertiesString = " " + systemProperties.entrySet().stream()
.map(entry -> "-D" + entry.getKey() + "=" + entry.getValue().get())
.map(entry -> "-D" + entry.getKey() + "=" + entry.getValue())
.collect(Collectors.joining(" "));
}
String jvmArgsString = "";
if (jvmArgs.isEmpty() == false) {
jvmArgsString = " " + jvmArgs.stream()
.map(Supplier::get)
.peek(charSequences -> requireNonNull(charSequences, "Jvm argument supplier returned null while configuring " + this))
.flatMap(Collection::stream)
.peek(argument -> {
requireNonNull(argument, "Jvm argument supplier returned null while configuring " + this);
if (argument.toString().startsWith("-D")) {
throw new TestClustersException("Invalid jvm argument `" + argument +
"` configure as systemProperty instead for " + this
@ -562,8 +606,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
);
}
checkSuppliers("Environment variable", environment.values());
environment.forEach((key, value) -> defaultEnv.put(key, value.get().toString()));
environment.forEach((key, value) -> defaultEnv.put(key, value.toString()));
return defaultEnv;
}
@ -686,14 +729,73 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private void logFileContents(String description, Path from) {
LOGGER.error("{} `{}`", description, this);
try(Stream<String> lines = Files.lines(from, StandardCharsets.UTF_8)) {
lines
.map(line -> " " + line)
.forEach(LOGGER::error);
final Map<String, Integer> errorsAndWarnings = new LinkedHashMap<>();
LinkedList<String> ring = new LinkedList<>();
try (LineNumberReader reader = new LineNumberReader(Files.newBufferedReader(from))) {
for (String line = reader.readLine(); line != null; line = reader.readLine()) {
final String lineToAdd;
if (ring.isEmpty()) {
lineToAdd = line;
} else {
if (line.startsWith("[")) {
lineToAdd = line;
// check to see if the previous message (possibly combined from multiple lines) was an error or
// warning as we want to show all of them
String previousMessage = normalizeLogLine(ring.getLast());
if (MESSAGES_WE_DONT_CARE_ABOUT.stream().noneMatch(previousMessage::contains) &&
(previousMessage.contains("ERROR") || previousMessage.contains("WARN"))) {
errorsAndWarnings.put(
previousMessage,
errorsAndWarnings.getOrDefault(previousMessage, 0) + 1
);
}
} else {
// We combine multi-line log messages to make sure we never break exceptions apart
lineToAdd = ring.removeLast() + "\n" + line;
}
}
ring.add(lineToAdd);
if (ring.size() >= TAIL_LOG_MESSAGES_COUNT) {
ring.removeFirst();
}
}
} catch (IOException e) {
throw new UncheckedIOException("Failed to tail log " + this, e);
}
if (errorsAndWarnings.isEmpty() == false || ring.isEmpty() == false) {
LOGGER.error("\n=== {} `{}` ===", description, this);
}
if (errorsAndWarnings.isEmpty() == false) {
LOGGER.lifecycle("\n» ↓ errors and warnings from " + from + "");
errorsAndWarnings.forEach((message, count) -> {
LOGGER.lifecycle("» " + message.replace("\n", "\"));
if (count > 1) {
LOGGER.lifecycle("» ↑ repeated " + count + " times ↑");
}
});
}
ring.removeIf(line -> MESSAGES_WE_DONT_CARE_ABOUT.stream().anyMatch(line::contains));
if (ring.isEmpty() == false) {
LOGGER.lifecycle("» ↓ last " + TAIL_LOG_MESSAGES_COUNT + " non error or warning messages from " + from + "");
ring.forEach(message -> {
if (errorsAndWarnings.containsKey(normalizeLogLine(message)) == false) {
LOGGER.lifecycle("» " + message.replace("\n", "\"));
}
});
}
}
private String normalizeLogLine(String line) {
if (line.contains("ERROR")) {
return line.substring(line.indexOf("ERROR"));
}
if (line.contains("WARN")) {
return line.substring(line.indexOf("WARN"));
}
return line;
}
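The tailing above keeps a bounded ring of messages rather than raw lines, folding continuation lines (anything not starting with "[") into the previous message so stack traces are never split. A minimal standalone sketch of the same idea, with illustrative names:

import java.util.LinkedList;
import java.util.List;

class LogTail {
    // Keep roughly the last n log *messages*, not lines.
    static List<String> tail(List<String> lines, int n) {
        LinkedList<String> ring = new LinkedList<>();
        for (String line : lines) {
            if (ring.isEmpty() || line.startsWith("[")) {
                ring.add(line);                            // a new log message
            } else {
                ring.add(ring.removeLast() + "\n" + line); // continuation, e.g. a stack trace
            }
            if (ring.size() > n) {
                ring.removeFirst();
            }
        }
        return ring;
    }
}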
private void waitForProcessToExit(ProcessHandle processHandle) {
@ -723,12 +825,12 @@ public class ElasticsearchNode implements TestClusterConfiguration {
* We remove write permissions to make sure files are not mistakenly edited (e.g. the config file) with changes
* reflected across all copies. Permissions are retained to be able to replace the links.
*
* @param sourceRoot where to copy from
* @param destinationRoot destination to link to
*/
private void syncWithLinks(Path sourceRoot, Path destinationRoot) {
if (Files.exists(destinationRoot)) {
services.delete(destinationRoot);
project.delete(destinationRoot);
}
try (Stream<Path> stream = Files.walk(sourceRoot)) {
@ -761,7 +863,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
}
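A minimal sketch of the linking strategy described in the javadoc above, leaving out the permission handling (names are illustrative; hard links require source and destination on the same filesystem):

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.stream.Stream;

class LinkSync {
    static void sync(Path sourceRoot, Path destinationRoot) throws IOException {
        try (Stream<Path> stream = Files.walk(sourceRoot)) {
            // walk visits directories before their children, so parents exist in time
            stream.forEach(source -> {
                Path destination = destinationRoot.resolve(sourceRoot.relativize(source));
                try {
                    if (Files.isDirectory(source)) {
                        Files.createDirectories(destination);
                    } else {
                        Files.createLink(destination, source); // hard link instead of a copy
                    }
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            });
        }
    }
}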
private void createConfiguration() {
String nodeName = nameCustomization.apply(safeName(name));
if (nodeName != null) {
defaultConfig.put("node.name", nodeName);
@ -773,7 +875,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
defaultConfig.put("node.attr.testattr", "test");
defaultConfig.put("node.portsfile", "true");
defaultConfig.put("http.port", "0");
if (Version.fromString(version).onOrAfter(Version.fromString("6.7.0"))) {
if (getVersion().onOrAfter(Version.fromString("6.7.0"))) {
defaultConfig.put("transport.port", "0");
} else {
defaultConfig.put("transport.tcp.port", "0");
@ -783,31 +885,28 @@ public class ElasticsearchNode implements TestClusterConfiguration {
defaultConfig.put("cluster.routing.allocation.disk.watermark.high", "1b");
// increase script compilation limit since tests can rapid-fire script compilations
defaultConfig.put("script.max_compilations_rate", "2048/1m");
if (Version.fromString(version).getMajor() >= 6) {
if (getVersion().getMajor() >= 6) {
defaultConfig.put("cluster.routing.allocation.disk.watermark.flood_stage", "1b");
}
// Temporarily disable the real memory usage circuit breaker. It depends on real memory usage which we have no full control
// over and the REST client will not retry on circuit breaking exceptions yet (see #31986 for details). Once the REST client
// can retry on circuit breaking exceptions, we can revert again to the default configuration.
if (Version.fromString(version).getMajor() >= 7) {
defaultConfig.put("indices.breaker.total.use_real_memory", "false");
if (getVersion().getMajor() >= 7) {
defaultConfig.put("indices.breaker.total.use_real_memory", "false");
}
// Don't wait for state, just start up quickly. This will also allow new and old nodes in the BWC case to become the master
defaultConfig.put("discovery.initial_state_timeout", "0s");
defaultConfig.put("discovery.initial_state_timeout", "0s");
checkSuppliers("Settings", settings.values());
Map<String, String> userConfig = settings.entrySet().stream()
.collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue().get().toString()));
HashSet<String> overriden = new HashSet<>(defaultConfig.keySet());
overriden.retainAll(userConfig.keySet());
overriden.retainAll(settings.keySet());
overriden.removeAll(OVERRIDABLE_SETTINGS);
if (overriden.isEmpty() ==false) {
if (overriden.isEmpty() == false) {
throw new IllegalArgumentException(
"Testclusters does not allow the following settings to be changed:" + overriden + " for " + this
);
}
// Make sure no duplicate config keys
userConfig.keySet().stream()
settings.keySet().stream()
.filter(OVERRIDABLE_SETTINGS::contains)
.forEach(defaultConfig::remove);
@ -818,7 +917,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
Files.write(
configFile,
Stream.concat(
userConfig.entrySet().stream(),
settings.entrySet().stream(),
defaultConfig.entrySet().stream()
)
.map(entry -> entry.getKey() + ": " + entry.getValue())
@ -833,7 +932,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private void checkFrozen() {
if (configurationFrozen.get()) {
throw new IllegalStateException("Configuration for " + this + " can not be altered, already locked");
throw new IllegalStateException("Configuration for " + this + " can not be altered, already locked");
}
}
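A minimal standalone sketch of the defaults/user merge performed in createConfiguration above (class and method names are illustrative): user settings may only replace defaults that are explicitly overridable, any other collision fails fast, and everything else is concatenated.

import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

class SettingsMerge {
    static Map<String, String> merge(Map<String, String> defaults,
                                     Map<String, String> user,
                                     Set<String> overridable) {
        // keys present in both maps that are not allowed to be overridden
        Set<String> overridden = new HashSet<>(defaults.keySet());
        overridden.retainAll(user.keySet());
        overridden.removeAll(overridable);
        if (overridden.isEmpty() == false) {
            throw new IllegalArgumentException("Not allowed to change: " + overridden);
        }
        Map<String, String> merged = new LinkedHashMap<>(user);
        defaults.forEach((key, value) -> {
            if (user.containsKey(key) == false) {
                merged.put(key, value);
            }
        });
        return merged;
    }
}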
@ -858,11 +957,97 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private List<String> readPortsFile(Path file) throws IOException {
try(Stream<String> lines = Files.lines(file, StandardCharsets.UTF_8)) {
try (Stream<String> lines = Files.lines(file, StandardCharsets.UTF_8)) {
return lines.map(String::trim).collect(Collectors.toList());
}
}
private Path getExtractedDistributionDir() {
return Paths.get(distribution.getExtracted().toString()).resolve("elasticsearch-" + version);
}
private List<File> getInstalledFileSet(Action<? super PatternFilterable> filter) {
return Stream.concat(
plugins.stream().filter(uri -> uri.getScheme().equalsIgnoreCase("file")).map(File::new),
modules.stream()
)
.filter(File::exists)
// TODO: We may be able to simplify this with Gradle 5.6
// https://docs.gradle.org/nightly/release-notes.html#improved-handling-of-zip-archives-on-classpaths
.map(zipFile -> project.zipTree(zipFile).matching(filter))
.flatMap(tree -> tree.getFiles().stream())
.sorted(Comparator.comparing(File::getName))
.collect(Collectors.toList());
}
@Input
private Set<URI> getRemotePlugins() {
Set<URI> file = plugins.stream().filter(uri -> uri.getScheme().equalsIgnoreCase("file") == false).collect(Collectors.toSet());
return file;
}
@Classpath
private List<File> getInstalledClasspath() {
return getInstalledFileSet(filter -> filter.include("**/*.jar"));
}
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
private List<File> getInstalledFiles() {
return getInstalledFileSet(filter -> filter.exclude("**/*.jar"));
}
@Classpath
private List<File> getDistributionClasspath() {
ArrayList<File> files = new ArrayList<>(project.fileTree(getExtractedDistributionDir())
.matching(filter -> filter.include("**/*.jar"))
.getFiles());
files.sort(Comparator.comparing(File::getName));
return files;
}
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
private FileCollection getDistributionFiles() {
return project.fileTree(getExtractedDistributionDir()).minus(project.files(getDistributionClasspath()));
}
@Nested
private Map<String, CharSequence> getKeystoreSettings() {
return keystoreSettings;
}
@Nested
private Map<String, File> getKeystoreFiles() {
return keystoreFiles;
}
@Nested
private Map<String, CharSequence> getSettings() {
return settings;
}
@Nested
private Map<String, CharSequence> getSystemProperties() {
return systemProperties;
}
@Nested
private Map<String, CharSequence> getEnvironment() {
return environment;
}
@Nested
private List<CharSequence> getJvmArgs() {
return jvmArgs;
}
@Nested
private Map<String, File> getExtraConfigFiles() {
return extraConfigFiles;
}
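These getters split the task inputs by how Gradle should fingerprint them: @Classpath normalizes jar content (ignoring timestamps and entry order), while @InputFiles with RELATIVE path sensitivity tracks other files by relative path and content. A hypothetical task using the same split, assuming only the standard Gradle annotations:

import org.gradle.api.DefaultTask;
import org.gradle.api.file.FileCollection;
import org.gradle.api.tasks.Classpath;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.TaskAction;

// Hypothetical task: jars and other files are declared as separate inputs so
// irrelevant changes (e.g. jar timestamps) do not defeat up-to-date checks.
// A real task would also declare outputs.
public class DistroInputsTask extends DefaultTask {
    private FileCollection jars;
    private FileCollection otherFiles;

    @Classpath
    public FileCollection getJars() { return jars; }

    @InputFiles
    @PathSensitive(PathSensitivity.RELATIVE)
    public FileCollection getOtherFiles() { return otherFiles; }

    public void setJars(FileCollection jars) { this.jars = jars; }
    public void setOtherFiles(FileCollection otherFiles) { this.otherFiles = otherFiles; }

    @TaskAction
    public void run() {
        getLogger().lifecycle("{} jars, {} other files", jars.getFiles().size(), otherFiles.getFiles().size());
    }
}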
@Override
public boolean isProcessAlive() {
requireNonNull(
@ -882,9 +1067,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
ADDITIONAL_CONFIG_TIMEOUT_UNIT.toMillis(ADDITIONAL_CONFIG_TIMEOUT *
(
plugins.size() +
keystoreFiles.size() +
keystoreSettings.size() +
credentials.size()
)
),
TimeUnit.MILLISECONDS,
@ -911,6 +1096,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
return "node{" + path + ":" + name + "}";
}
@Input
List<Map<String, String>> getCredentials() {
return credentials;
}
@ -930,7 +1116,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
public boolean isHttpSslEnabled() {
return Boolean.valueOf(
settings.getOrDefault("xpack.security.http.ssl.enabled", () -> "false").get().toString()
settings.getOrDefault("xpack.security.http.ssl.enabled", "false").toString()
);
}
@ -938,28 +1124,50 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (settings.containsKey("xpack.security.http.ssl.certificate_authorities")) {
wait.setCertificateAuthorities(
getConfigDir()
.resolve(settings.get("xpack.security.http.ssl.certificate_authorities").get().toString())
.resolve(settings.get("xpack.security.http.ssl.certificate_authorities").toString())
.toFile()
);
}
if (settings.containsKey("xpack.security.http.ssl.certificate")) {
wait.setCertificateAuthorities(
getConfigDir()
.resolve(settings.get("xpack.security.http.ssl.certificate").get().toString())
.resolve(settings.get("xpack.security.http.ssl.certificate").toString())
.toFile()
);
}
if (settings.containsKey("xpack.security.http.ssl.keystore.path")) {
wait.setTrustStoreFile(
getConfigDir()
.resolve(settings.get("xpack.security.http.ssl.keystore.path").get().toString())
.resolve(settings.get("xpack.security.http.ssl.keystore.path").toString())
.toFile()
);
}
if (keystoreSettings.containsKey("xpack.security.http.ssl.keystore.secure_password")) {
wait.setTrustStorePassword(
keystoreSettings.get("xpack.security.http.ssl.keystore.secure_password").get().toString()
keystoreSettings.get("xpack.security.http.ssl.keystore.secure_password").toString()
);
}
}
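For illustration, a node configured along these lines would make isHttpSslEnabled() return true and point the wait condition at a CA file; the helper class and file name are made up, and the path is resolved against the node's config dir by configureHttpWait above:

import org.elasticsearch.gradle.testclusters.ElasticsearchNode;

class HttpsWaitSetup {
    static void enable(ElasticsearchNode node) {
        node.setting("xpack.security.http.ssl.enabled", "true");
        node.setting("xpack.security.http.ssl.certificate_authorities", "ca.crt"); // illustrative file
    }
}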
private static class FileEntry implements Named {
private String name;
private File file;
FileEntry(String name, File file) {
this.name = name;
this.file = file;
}
@Input
@Override
public String getName() {
return name;
}
@InputFile
@PathSensitive(PathSensitivity.NONE)
public File getFile() {
return file;
}
}
}

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.PropertyNormalization;
import org.gradle.api.logging.Logging;
import org.slf4j.Logger;
@ -38,7 +38,7 @@ public interface TestClusterConfiguration {
void setVersion(String version);
void setDistribution(Distribution distribution);
void setTestDistribution(TestDistribution distribution);
void plugin(URI plugin);
@ -52,23 +52,31 @@ public interface TestClusterConfiguration {
void keystore(String key, File value);
void keystore(String key, File value, PropertyNormalization normalization);
void keystore(String key, FileSupplier valueSupplier);
void setting(String key, String value);
void setting(String key, String value, PropertyNormalization normalization);
void setting(String key, Supplier<CharSequence> valueSupplier);
void setting(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization);
void systemProperty(String key, String value);
void systemProperty(String key, Supplier<CharSequence> valueSupplier);
void systemProperty(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization);
void environment(String key, String value);
void environment(String key, Supplier<CharSequence> valueSupplier);
void jvmArgs(String... values);
void environment(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization);
void jvmArgs(Supplier<String[]> valueSupplier);
void jvmArgs(String... values);
void freeze();
@ -80,6 +88,8 @@ public interface TestClusterConfiguration {
void extraConfigFile(String destination, File from);
void extraConfigFile(String destination, File from, PropertyNormalization normalization);
void user(Map<String, String> userSpec);
String getHttpSocketURI();
@ -158,7 +168,5 @@ public interface TestClusterConfiguration {
.replaceAll("[^a-zA-Z0-9]+", "-");
}
boolean isProcessAlive();
}

View File

@ -19,20 +19,15 @@
package org.elasticsearch.gradle.testclusters;
import groovy.lang.Closure;
import org.elasticsearch.gradle.BwcVersions;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.tool.Boilerplate;
import org.gradle.api.Action;
import org.elasticsearch.gradle.DistributionDownloadPlugin;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.test.RestTestRunnerTask;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.repositories.MavenArtifactRepository;
import org.gradle.api.credentials.HttpHeaderCredentials;
import org.gradle.api.execution.TaskActionListener;
import org.gradle.api.execution.TaskExecutionListener;
import org.gradle.api.file.FileTree;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.plugins.ExtraPropertiesExtension;
@ -52,8 +47,6 @@ public class TestClustersPlugin implements Plugin<Project> {
private static final String LIST_TASK_NAME = "listTestClusters";
public static final String EXTENSION_NAME = "testClusters";
private static final String HELPER_CONFIGURATION_PREFIX = "testclusters";
private static final String SYNC_ARTIFACTS_TASK_NAME = "syncTestClustersArtifacts";
private static final Logger logger = Logging.getLogger(TestClustersPlugin.class);
private static final String TESTCLUSTERS_INSPECT_FAILURE = "testclusters.inspect.failure";
@ -63,13 +56,9 @@ public class TestClustersPlugin implements Plugin<Project> {
private final Set<ElasticsearchCluster> runningClusters = new HashSet<>();
private final Boolean allowClusterToSurvive = Boolean.valueOf(System.getProperty(TESTCLUSTERS_INSPECT_FAILURE, "false"));
public static String getHelperConfigurationName(String version) {
return HELPER_CONFIGURATION_PREFIX + "-" + version;
}
@Override
public void apply(Project project) {
Project rootProject = project.getRootProject();
project.getPlugins().apply(DistributionDownloadPlugin.class);
// enable the DSL to describe clusters
NamedDomainObjectContainer<ElasticsearchCluster> container = createTestClustersContainerExtension(project);
@ -93,17 +82,11 @@ public class TestClustersPlugin implements Plugin<Project> {
// After each task we determine if there are clusters that are no longer needed.
configureStopClustersHook(project);
// Since we have everything modeled in the DSL, add all the required dependencies e.x. the distribution to the
// configuration so the user doesn't have to repeat this.
autoConfigureClusterDependencies(project, rootProject, container);
}
private static File getExtractDir(Project project) {
return new File(project.getRootProject().getBuildDir(), "testclusters/extract/");
}
private NamedDomainObjectContainer<ElasticsearchCluster> createTestClustersContainerExtension(Project project) {
NamedDomainObjectContainer<ElasticsearchDistribution> distros = DistributionDownloadPlugin.getContainer(project);
// Create an extensions that allows describing clusters
NamedDomainObjectContainer<ElasticsearchCluster> container = project.container(
ElasticsearchCluster.class,
@ -111,7 +94,7 @@ public class TestClustersPlugin implements Plugin<Project> {
project.getPath(),
name,
project,
new File(project.getRootProject().getBuildDir(), "testclusters/extract"),
i -> distros.create(name + "-" + i),
new File(project.getBuildDir(), "testclusters")
)
);
@ -152,9 +135,12 @@ public class TestClustersPlugin implements Plugin<Project> {
"Task, but got: " + thisObject.getClass());
}
usedClusters.computeIfAbsent(task, k -> new ArrayList<>()).add(cluster);
((Task) thisObject).dependsOn(
project.getRootProject().getTasks().getByName(SYNC_ARTIFACTS_TASK_NAME)
);
for (ElasticsearchNode node : cluster.getNodes()) {
((Task) thisObject).dependsOn(node.getDistribution().getExtracted());
}
if (thisObject instanceof RestTestRunnerTask) {
((RestTestRunnerTask) thisObject).testCluster(cluster);
}
}
})
);
@ -281,145 +267,4 @@ public class TestClustersPlugin implements Plugin<Project> {
}
cluster.stop(taskFailed);
}
/**
* Boilerplate to get testClusters container extension
*
* Equivalent to project.testClusters in the DSL
*/
@SuppressWarnings("unchecked")
public static NamedDomainObjectContainer<ElasticsearchCluster> getNodeExtension(Project project) {
return (NamedDomainObjectContainer<ElasticsearchCluster>)
project.getExtensions().getByName(EXTENSION_NAME);
}
private static void autoConfigureClusterDependencies(
Project project,
Project rootProject,
NamedDomainObjectContainer<ElasticsearchCluster> container
) {
// Download integ test distribution from maven central
MavenArtifactRepository mavenCentral = project.getRepositories().mavenCentral();
mavenCentral.content(spec -> {
spec.includeGroupByRegex("org\\.elasticsearch\\.distribution\\..*");
});
// Other distributions from the download service
project.getRepositories().add(
project.getRepositories().ivy(spec -> {
spec.setUrl("https://artifacts.elastic.co/downloads");
spec.patternLayout(p -> p.artifact("elasticsearch/[module]-[revision](-[classifier]).[ext]"));
HttpHeaderCredentials headerConfig = spec.getCredentials(HttpHeaderCredentials.class);
headerConfig.setName("X-Elastic-No-KPI");
headerConfig.setValue("1");
spec.content(c-> c.includeGroupByRegex("org\\.elasticsearch\\.distribution\\..*"));
})
);
// We have a single task to sync the helper configuration to "artifacts dir"
// the clusters will look for artifacts there based on the naming conventions.
// Tasks that use a cluster will add this as a dependency automatically so it's guaranteed to run early in
// the build.
Boilerplate.maybeCreate(rootProject.getTasks(), SYNC_ARTIFACTS_TASK_NAME, onCreate -> {
onCreate.getOutputs().dir(getExtractDir(rootProject));
onCreate.getInputs().files(
project.getRootProject().getConfigurations().matching(conf -> conf.getName().startsWith(HELPER_CONFIGURATION_PREFIX))
);
onCreate.dependsOn(project.getRootProject().getConfigurations()
.matching(conf -> conf.getName().startsWith(HELPER_CONFIGURATION_PREFIX))
);
// NOTE: Gradle doesn't allow a lambda here ( fails at runtime )
onCreate.doFirst(new Action<Task>() {
@Override
public void execute(Task task) {
// Clean up the extract dir first to make sure we have no stale files from older
// previous builds of the same distribution
project.delete(getExtractDir(rootProject));
}
});
onCreate.doLast(new Action<Task>() {
@Override
public void execute(Task task) {
project.getRootProject().getConfigurations()
.matching(config -> config.getName().startsWith(HELPER_CONFIGURATION_PREFIX))
.forEach(config -> project.copy(spec ->
config.getResolvedConfiguration()
.getResolvedArtifacts()
.forEach(resolvedArtifact -> {
final FileTree files;
File file = resolvedArtifact.getFile();
if (file.getName().endsWith(".zip")) {
files = project.zipTree(file);
} else if (file.getName().endsWith("tar.gz")) {
files = project.tarTree(file);
} else {
throw new IllegalArgumentException("Can't extract " + file + " unknown file extension");
}
logger.info("Extracting {}@{}", resolvedArtifact, config);
spec.from(files, s -> s.into(resolvedArtifact.getModuleVersion().getId().getGroup()));
spec.into(getExtractDir(project));
}))
);
}
});
});
// When the project evaluated we know of all tasks that use clusters.
// Each of these have to depend on the artifacts being synced.
// We need afterEvaluate here despite the fact that container is a domain object, we can't implement this with
// all because fields can change after the fact.
project.afterEvaluate(ip -> container.forEach(esCluster ->
esCluster.eachVersionedDistribution((version, distribution) -> {
Configuration helperConfiguration = Boilerplate.maybeCreate(
rootProject.getConfigurations(),
getHelperConfigurationName(version),
onCreate ->
// We use a single configuration on the root project to resolve all testcluster dependencies ( like distros )
// at once, only once without the need to repeat it for each project. This pays off assuming that most
// projects use the same dependencies.
onCreate.setDescription(
"Internal helper configuration used by cluster configuration to download " +
"ES distributions and plugins for " + version
)
);
BwcVersions.UnreleasedVersionInfo unreleasedInfo;
final List<Version> unreleased;
{
ExtraPropertiesExtension extraProperties = project.getExtensions().getExtraProperties();
if (extraProperties.has("bwcVersions")) {
Object bwcVersionsObj = extraProperties.get("bwcVersions");
if (bwcVersionsObj instanceof BwcVersions == false) {
throw new IllegalStateException("Expected project.bwcVersions to be of type VersionCollection " +
"but instead it was " + bwcVersionsObj.getClass());
}
final BwcVersions bwcVersions = (BwcVersions) bwcVersionsObj;
unreleased = ((BwcVersions) bwcVersionsObj).getUnreleased();
unreleasedInfo = bwcVersions.unreleasedInfo(Version.fromString(version));
} else {
logger.info("No version information available, assuming all versions used are released");
unreleased = Collections.emptyList();
unreleasedInfo = null;
}
}
if (unreleased.contains(Version.fromString(version))) {
Map<String, Object> projectNotation = new HashMap<>();
projectNotation.put("path", unreleasedInfo.gradleProjectPath);
projectNotation.put("configuration", distribution.getLiveConfiguration());
rootProject.getDependencies().add(
helperConfiguration.getName(),
project.getDependencies().project(projectNotation)
);
} else {
rootProject.getDependencies().add(
helperConfiguration.getName(),
distribution.getGroup() + ":" +
distribution.getArtifactName() + ":" +
version +
(distribution.getClassifier().isEmpty() ? "" : ":" + distribution.getClassifier()) + "@" +
distribution.getFileExtension());
}
})));
}
}

View File

@ -0,0 +1,28 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.testclusters;
/**
* An enumeration of the distributions that may be used in test clusters.
*/
public enum TestDistribution {
INTEG_TEST,
DEFAULT,
OSS
}

View File

@ -22,9 +22,7 @@ import com.avast.gradle.dockercompose.ComposeExtension;
import com.avast.gradle.dockercompose.DockerComposePlugin;
import com.avast.gradle.dockercompose.tasks.ComposeUp;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.precommit.JarHellTask;
import org.elasticsearch.gradle.precommit.TestingConventionsTasks;
import org.elasticsearch.gradle.precommit.ThirdPartyAuditTask;
import org.gradle.api.DefaultTask;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
@ -35,6 +33,9 @@ import org.gradle.api.tasks.TaskContainer;
import org.gradle.api.tasks.testing.Test;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.Collections;
import java.util.function.BiConsumer;
@ -50,19 +51,25 @@ public class TestFixturesPlugin implements Plugin<Project> {
"testFixtures", TestFixtureExtension.class, project
);
if (project.file(DOCKER_COMPOSE_YML).exists()) {
// convenience boilerplate with build plugin
// Can't reference tasks that are implemented in Groovy, use reflection instead
disableTaskByType(tasks, getTaskClass("org.elasticsearch.gradle.precommit.LicenseHeadersTask"));
disableTaskByType(tasks, ThirdPartyAuditTask.class);
disableTaskByType(tasks, JarHellTask.class);
ExtraPropertiesExtension ext = project.getExtensions().getByType(ExtraPropertiesExtension.class);
File testfixturesDir = project.file("testfixtures_shared");
ext.set("testFixturesDir", testfixturesDir);
if (project.file(DOCKER_COMPOSE_YML).exists()) {
// the project that defined a test fixture can also use it
extension.fixtures.add(project);
Task buildFixture = project.getTasks().create("buildFixture");
Task pullFixture = project.getTasks().create("pullFixture");
Task preProcessFixture = project.getTasks().create("preProcessFixture");
preProcessFixture.doFirst((task) -> {
try {
Files.createDirectories(testfixturesDir.toPath());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
preProcessFixture.getOutputs().dir(testfixturesDir);
buildFixture.dependsOn(preProcessFixture);
pullFixture.dependsOn(preProcessFixture);
Task postProcessFixture = project.getTasks().create("postProcessFixture");
@ -90,6 +97,9 @@ public class TestFixturesPlugin implements Plugin<Project> {
pullFixture.dependsOn(tasks.getByName("composePull"));
tasks.getByName("composeUp").mustRunAfter(preProcessFixture);
tasks.getByName("composePull").mustRunAfter(preProcessFixture);
tasks.getByName("composeDown").doLast((task) -> {
project.delete(testfixturesDir);
});
configureServiceInfoForTask(
postProcessFixture,
@ -98,11 +108,19 @@ public class TestFixturesPlugin implements Plugin<Project> {
.getByType(ExtraPropertiesExtension.class).set(name, port)
);
}
} else {
project.afterEvaluate(spec -> {
if (extension.fixtures.isEmpty()) {
// if only one fixture is used, that's this one, but without a compose file that's not a valid configuration
throw new IllegalStateException("No " + DOCKER_COMPOSE_YML + " found for " + project.getPath() +
" nor does it use other fixtures.");
}
});
}
extension.fixtures
.matching(fixtureProject -> fixtureProject.equals(project) == false)
.all(fixtureProject -> project.evaluationDependsOn(fixtureProject.getPath()));
.all(fixtureProject -> project.evaluationDependsOn(fixtureProject.getPath()));
conditionTaskByType(tasks, extension, Test.class);
conditionTaskByType(tasks, extension, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask"));

View File

@ -0,0 +1,118 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.vagrant;
import org.elasticsearch.gradle.LoggingOutputStream;
import org.gradle.api.GradleScriptException;
import org.gradle.api.logging.Logger;
import org.gradle.internal.logging.progress.ProgressLogger;
import java.util.Formatter;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Adapts an OutputStream containing TAP output from bats into a ProgressLogger and a Logger.
*
* TAP (Test Anything Protocol, https://testanything.org) is used by BATS for its output format.
*
* All test output goes to the ProgressLogger, while failures
* and non-test output go to the Logger. That means you can always glance
* at the result of the last test and the cumulative pass/fail/skip stats, and
* the failures are all logged.
*
* There is a Tap4j project but we can't use it because it wants to parse the
* entire TAP stream at once and won't parse it stream-wise.
*/
public class TapLoggerOutputStream extends LoggingOutputStream {
private static final Pattern lineRegex =
Pattern.compile("(?<status>ok|not ok) \\d+(?<skip> # skip (?<skipReason>\\(.+\\))?)? \\[(?<suite>.+)\\] (?<test>.+)");
private final Logger logger;
private final ProgressLogger progressLogger;
private boolean isStarted = false;
private int testsCompleted = 0;
private int testsFailed = 0;
private int testsSkipped = 0;
private Integer testCount;
private String countsFormat;
TapLoggerOutputStream(Logger logger, ProgressLogger progressLogger) {
this.logger = logger;
this.progressLogger = progressLogger;
}
@Override
public void logLine(String line) {
if (isStarted == false) {
progressLogger.started("started");
isStarted = true;
}
if (testCount == null) {
try {
int lastDot = line.lastIndexOf('.');
testCount = Integer.parseInt(line.substring(lastDot + 1));
int length = String.valueOf(testCount).length();
String count = "%0" + length + "d";
countsFormat = "[" + count +"|" + count + "|" + count + "/" + count + "]";
return;
} catch (Exception e) {
throw new GradleScriptException("Error parsing first line of TAP stream!!", e);
}
}
Matcher m = lineRegex.matcher(line);
if (m.matches() == false) {
/* These might be failure report lines or comments or whatever. It's hard
to tell and it doesn't matter. */
logger.warn(line);
return;
}
boolean skipped = m.group("skip") != null;
boolean success = skipped == false && m.group("status").equals("ok");
String skipReason = m.group("skipReason");
String suiteName = m.group("suite");
String testName = m.group("test");
final String status;
if (skipped) {
status = "SKIPPED";
testsSkipped++;
} else if (success) {
status = " OK";
testsCompleted++;
} else {
status = " FAILED";
testsFailed++;
}
String counts = new Formatter().format(countsFormat, testsCompleted, testsFailed, testsSkipped, testCount).out().toString();
progressLogger.progress("BATS " + counts + ", " + status + " [" + suiteName + "] " + testName);
if (success == false) {
logger.warn(line);
}
}
@Override
public void close() {
flush();
progressLogger.completed();
}
}
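As a worked example of the TAP lines the regex above accepts, a standalone sketch (the sample lines are made up; the pattern is copied verbatim from the class):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

class TapLineDemo {
    private static final Pattern LINE_REGEX =
        Pattern.compile("(?<status>ok|not ok) \\d+(?<skip> # skip (?<skipReason>\\(.+\\))?)? \\[(?<suite>.+)\\] (?<test>.+)");

    public static void main(String[] args) {
        String[] samples = {
            "ok 1 [packaging] install works",
            "not ok 2 [packaging] service starts",
            "ok 3 # skip (requires docker) [packaging] container runs"
        };
        for (String line : samples) {
            Matcher m = LINE_REGEX.matcher(line);
            if (m.matches()) {
                System.out.printf("suite=%s test=%s skipped=%b passed=%b%n",
                    m.group("suite"), m.group("test"),
                    m.group("skip") != null, m.group("status").equals("ok"));
            }
        }
    }
}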

View File

@ -0,0 +1,80 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.vagrant;
import org.elasticsearch.gradle.LoggingOutputStream;
import org.gradle.internal.logging.progress.ProgressLogger;
public class VagrantLoggerOutputStream extends LoggingOutputStream {
private static final String HEADING_PREFIX = "==> ";
private final ProgressLogger progressLogger;
private final String squashedPrefix;
private boolean isStarted = false;
private String lastLine = "";
private boolean inProgressReport = false;
private String heading = "";
VagrantLoggerOutputStream(ProgressLogger progressLogger, String squashedPrefix) {
this.progressLogger = progressLogger;
this.squashedPrefix = squashedPrefix;
}
@Override
protected void logLine(String line) {
if (isStarted == false) {
progressLogger.started("started");
isStarted = true;
}
if (line.startsWith("\r\u001b")) {
/* We don't want to try to be a full terminal emulator but we want to
keep the escape sequences from leaking and catch _some_ of the
meaning. */
line = line.substring(2);
if ("[K".equals(line)) {
inProgressReport = true;
}
return;
}
if (line.startsWith(squashedPrefix)) {
line = line.substring(squashedPrefix.length());
inProgressReport = false;
lastLine = line;
if (line.startsWith(HEADING_PREFIX)) {
line = line.substring(HEADING_PREFIX.length());
heading = line + " > ";
} else {
line = heading + line;
}
} else if (inProgressReport) {
inProgressReport = false;
line = lastLine + line;
} else {
return;
}
progressLogger.progress(line);
}
@Override
public void close() {
flush();
progressLogger.completed();
}
}

View File

@ -17,7 +17,6 @@ import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.function.Consumer;
import java.util.function.Function;
@ -79,7 +78,13 @@ public class LoggedExec extends Exec {
};
} else {
out = new ByteArrayOutputStream();
outputLogger = logger -> logger.error(((ByteArrayOutputStream) out).toString(StandardCharsets.UTF_8));
outputLogger = logger -> {
try {
logger.error(((ByteArrayOutputStream) out).toString("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
};
}
setStandardOutput(out);
setErrorOutput(out);
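The change above swaps the toString(Charset) overload, which only exists since JDK 10, for the String-named one, presumably to keep the class usable on a Java 8 runtime. A minimal sketch of that pattern (class and method names are illustrative):

import java.io.ByteArrayOutputStream;
import java.io.UnsupportedEncodingException;

class Utf8Buffers {
    static String toUtf8String(ByteArrayOutputStream out) {
        try {
            // available on Java 8, unlike toString(StandardCharsets.UTF_8)
            return out.toString("UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed to be present on every JVM
            throw new RuntimeException(e);
        }
    }
}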

View File

@ -188,9 +188,7 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
private ElasticsearchDistribution createDistro(Project project, String name, String version, Type type,
Platform platform, Flavor flavor, Boolean bundledJdk) {
@SuppressWarnings("unchecked")
NamedDomainObjectContainer<ElasticsearchDistribution> distros =
(NamedDomainObjectContainer<ElasticsearchDistribution>) project.getExtensions().getByName("elasticsearch_distributions");
NamedDomainObjectContainer<ElasticsearchDistribution> distros = DistributionDownloadPlugin.getContainer(project);
return distros.create(name, distro -> {
if (version != null) {
distro.setVersion(version);

View File

@ -21,6 +21,8 @@ package org.elasticsearch.gradle;
import java.io.File;
import java.io.IOException;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import org.apache.tools.ant.taskdefs.condition.Os;
import org.elasticsearch.gradle.test.GradleUnitTestCase;
import org.gradle.api.Project;
import org.gradle.testfixtures.ProjectBuilder;
@ -49,6 +51,8 @@ public class EmptyDirTaskTests extends GradleUnitTestCase {
}
public void testCreateEmptyDirNoPermissions() throws Exception {
RandomizedTest.assumeFalse("Functionality is Unix specific", Os.isFamily(Os.FAMILY_WINDOWS));
Project project = ProjectBuilder.builder().build();
EmptyDirTask emptyDirTask = project.getTasks().create("emptyDirTask", EmptyDirTask.class);
emptyDirTask.setDirMode(0000);

View File

@ -48,27 +48,7 @@ public class ThirdPartyAuditTaskIT extends GradleIntegrationTestCase {
"-PcompileOnlyGroup=other.gradle:broken-log4j", "-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=other.gradle:dummy-io", "-PcompileVersion=0.0.1"
)
.build();
assertTaskSuccessful(result, ":empty");
result = getGradleRunner("thirdPartyAudit")
.withArguments("empty", "-s",
"-PcompileOnlyGroup=other.gradle:broken-log4j", "-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=other.gradle:dummy-io", "-PcompileVersion=0.0.1"
)
.build();
assertTaskUpToDate(result, ":empty");
result = getGradleRunner("thirdPartyAudit")
.withArguments("empty", "-s",
"-PcompileOnlyGroup=other.gradle:broken-log4j", "-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=other.gradle:dummy-io", "-PcompileVersion=0.0.2"
)
.build();
assertTaskSuccessful(result, ":empty");
.buildAndFail();
}
public void testViolationFoundAndCompileOnlyIgnored() {

View File

@ -68,9 +68,9 @@ public class TestClustersPluginIT extends GradleIntegrationTestCase {
public void testUseClusterByUpToDateTask() {
// Run it once, ignoring the result, and again to make sure it's considered up to date.
// Gradle randomly considers tasks without inputs and outputs as up-to-date or success on the first run
getTestClustersRunner(":upToDate1", ":upToDate2").build();
BuildResult result = getTestClustersRunner(":upToDate1", ":upToDate2").build();
assertTaskUpToDate(result, ":upToDate1", ":upToDate2");
getTestClustersRunner(":upToDate1").build();
BuildResult result = getTestClustersRunner(":upToDate1").build();
assertTaskUpToDate(result, ":upToDate1");
assertNotStarted(result);
}

View File

@ -26,7 +26,7 @@ allprojects { all ->
all.testClusters {
myTestCluster {
distribution = 'DEFAULT'
testDistribution = 'DEFAULT'
version = System.getProperty("test.version_under_test")
javaHome = file(System.getProperty('java.home'))
plugin file("${project(":dummyPlugin").buildDir}/distributions/dummy-${System.getProperty("test.version_under_test")}.zip")
@ -54,23 +54,23 @@ allprojects { all ->
testClusters {
multiNode {
version = System.getProperty("test.version_under_test")
distribution = 'DEFAULT'
testDistribution = 'DEFAULT'
javaHome = file(System.getProperty('java.home'))
numberOfNodes = 3
}
releasedVersionDefault {
version = "7.0.0"
distribution = 'DEFAULT'
testDistribution = 'DEFAULT'
javaHome = file(System.getProperty('java.home'))
}
releasedVersionOSS {
version = "7.0.0"
distribution = 'OSS'
testDistribution = 'OSS'
javaHome = file(System.getProperty('java.home'))
}
releasedVersionIntegTest {
version = "7.0.0"
distribution = 'INTEG_TEST'
testDistribution = 'INTEG_TEST'
javaHome = file(System.getProperty('java.home'))
}
}
@ -105,10 +105,10 @@ task printLog {
task upToDate1 {
useCluster testClusters.myTestCluster
}
task upToDate2 {
useCluster testClusters.myTestCluster
outputs.upToDateWhen { true }
doLast {
println "Some task action"
}
}
task skipped1 {
@ -137,6 +137,6 @@ task illegalConfigAlter {
useCluster testClusters.myTestCluster
doFirst {
println "Going to alter configuration after use"
testClusters.myTestCluster.distribution = 'OSS'
testClusters.myTestCluster.testDistribution = 'OSS'
}
}

View File

@ -2,8 +2,8 @@ import org.elasticsearch.gradle.precommit.ThirdPartyAuditTask
plugins {
id 'java'
//just to get build-tools
id 'elasticsearch.testclusters'
// bring in build-tools onto the classpath
id 'elasticsearch.global-build-info' apply false
}
repositories {

View File

@ -1,5 +1,5 @@
elasticsearch = 7.4.0
lucene = 8.1.0
lucene = 8.2.0-snapshot-6413aae226
bundled_jdk = 12.0.1+12@69cfe15208a647278a19ef0990eea691

View File

@ -18,20 +18,15 @@
*/
package org.elasticsearch.plugin.noop.action.bulk;
import org.elasticsearch.action.StreamableResponseActionType;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.bulk.BulkResponse;
public class NoopBulkAction extends StreamableResponseActionType<BulkResponse> {
public class NoopBulkAction extends ActionType<BulkResponse> {
public static final String NAME = "mock:data/write/bulk";
public static final NoopBulkAction INSTANCE = new NoopBulkAction();
private NoopBulkAction() {
super(NAME);
}
@Override
public BulkResponse newResponse() {
return new BulkResponse(null, 0);
super(NAME, BulkResponse::new);
}
}
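
The same migration applies to any ActionType: the response's wire deserializer is handed to the constructor instead of overriding newResponse(). A minimal sketch of the pattern under a hypothetical action name, reusing BulkResponse, whose StreamInput constructor serves as the Writeable.Reader:

import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.bulk.BulkResponse;

// Hypothetical action showing the new pattern: BulkResponse::new is the
// Writeable.Reader that replaces the old newResponse() override.
public class MyNoopAction extends ActionType<BulkResponse> {
    public static final String NAME = "mock:data/write/my_noop";
    public static final MyNoopAction INSTANCE = new MyNoopAction();

    private MyNoopAction() {
        super(NAME, BulkResponse::new);
    }
}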

View File

@ -32,15 +32,13 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import java.util.function.Supplier;
public class TransportNoopBulkAction extends HandledTransportAction<BulkRequest, BulkResponse> {
private static final BulkItemResponse ITEM_RESPONSE = new BulkItemResponse(1, DocWriteRequest.OpType.UPDATE,
new UpdateResponse(new ShardId("mock", "", 1), "mock_type", "1", 1L, DocWriteResponse.Result.CREATED));
@Inject
public TransportNoopBulkAction(TransportService transportService, ActionFilters actionFilters) {
super(NoopBulkAction.NAME, transportService, actionFilters, (Supplier<BulkRequest>) BulkRequest::new);
super(NoopBulkAction.NAME, transportService, actionFilters, BulkRequest::new);
}
@Override

View File

@ -20,18 +20,12 @@ package org.elasticsearch.plugin.noop.action.search;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.io.stream.Writeable;
public class NoopSearchAction extends ActionType<SearchResponse> {
public static final NoopSearchAction INSTANCE = new NoopSearchAction();
public static final String NAME = "mock:data/read/search";
private NoopSearchAction() {
super(NAME);
}
@Override
public Writeable.Reader<SearchResponse> getResponseReader() {
return SearchResponse::new;
super(NAME, SearchResponse::new);
}
}

View File

@ -102,7 +102,7 @@ integTest.runner {
}
testClusters.integTest {
distribution = "DEFAULT"
testDistribution = 'DEFAULT'
systemProperty 'es.scripting.update.ctx_in_params', 'false'
setting 'reindex.remote.whitelist', '[ "[::1]:*", "127.0.0.1:*" ]'
setting 'xpack.license.self_generated.type', 'trial'

View File

@ -37,7 +37,9 @@ import java.io.IOException;
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;
import static org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest.FORCE;
import static org.elasticsearch.client.dataframe.GetDataFrameTransformRequest.ALLOW_NO_MATCH;
import static org.elasticsearch.client.dataframe.PutDataFrameTransformRequest.DEFER_VALIDATION;
final class DataFrameRequestConverters {
@ -50,6 +52,9 @@ final class DataFrameRequestConverters {
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putRequest, REQUEST_BODY_CONTENT_TYPE));
if (putRequest.getDeferValidation() != null) {
request.addParameter(DEFER_VALIDATION, Boolean.toString(putRequest.getDeferValidation()));
}
return request;
}
@ -71,12 +76,16 @@ final class DataFrameRequestConverters {
return request;
}
static Request deleteDataFrameTransform(DeleteDataFrameTransformRequest request) {
static Request deleteDataFrameTransform(DeleteDataFrameTransformRequest deleteRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(request.getId())
.addPathPart(deleteRequest.getId())
.build();
return new Request(HttpDelete.METHOD_NAME, endpoint);
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
if (deleteRequest.getForce() != null) {
request.addParameter(FORCE, Boolean.toString(deleteRequest.getForce()));
}
return request;
}
static Request startDataFrameTransform(StartDataFrameTransformRequest startRequest) {

View File

@ -20,7 +20,6 @@
package org.elasticsearch.client;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.ToXContent;
@ -47,7 +46,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpect
* We would usually throw such an exception, but we configure the client to not throw for 404 to support the case above, hence we also do not
* throw in case an index is not found, although it is a hard error that doesn't come back with aliases.
*/
public class GetAliasesResponse extends ActionResponse implements StatusToXContentObject {
public class GetAliasesResponse implements StatusToXContentObject {
private final RestStatus status;
private final String error;

View File

@ -34,6 +34,12 @@ import org.elasticsearch.client.indexlifecycle.RemoveIndexLifecyclePolicyRespons
import org.elasticsearch.client.indexlifecycle.RetryLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.StartILMRequest;
import org.elasticsearch.client.indexlifecycle.StopILMRequest;
import org.elasticsearch.client.snapshotlifecycle.DeleteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.snapshotlifecycle.ExecuteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.snapshotlifecycle.ExecuteSnapshotLifecyclePolicyResponse;
import org.elasticsearch.client.snapshotlifecycle.GetSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.snapshotlifecycle.GetSnapshotLifecyclePolicyResponse;
import org.elasticsearch.client.snapshotlifecycle.PutSnapshotLifecyclePolicyRequest;
import java.io.IOException;
@ -300,4 +306,144 @@ public class IndexLifecycleClient {
restHighLevelClient.performRequestAsyncAndParseEntity(request, IndexLifecycleRequestConverters::retryLifecycle, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
}
/**
* Retrieve one or more snapshot lifecycle policy definitions.
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-get-snapshot-lifecycle-policy.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public GetSnapshotLifecyclePolicyResponse getSnapshotLifecyclePolicy(GetSnapshotLifecyclePolicyRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, IndexLifecycleRequestConverters::getSnapshotLifecyclePolicy,
options, GetSnapshotLifecyclePolicyResponse::fromXContent, emptySet());
}
/**
* Asynchronously retrieve one or more snapshot lifecycle policy definitions.
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-get-snapshot-lifecycle-policy.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void getSnapshotLifecyclePolicyAsync(GetSnapshotLifecyclePolicyRequest request, RequestOptions options,
ActionListener<GetSnapshotLifecyclePolicyResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, IndexLifecycleRequestConverters::getSnapshotLifecyclePolicy,
options, GetSnapshotLifecyclePolicyResponse::fromXContent, listener, emptySet());
}
/**
* Create or modify a snapshot lifecycle definition.
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-put-snapshot-lifecycle-policy.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse putSnapshotLifecyclePolicy(PutSnapshotLifecyclePolicyRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, IndexLifecycleRequestConverters::putSnapshotLifecyclePolicy,
options, AcknowledgedResponse::fromXContent, emptySet());
}
/**
* Asynchronously create or modify a snapshot lifecycle definition.
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-put-snapshot-lifecycle-policy.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void putSnapshotLifecyclePolicyAsync(PutSnapshotLifecyclePolicyRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, IndexLifecycleRequestConverters::putSnapshotLifecyclePolicy,
options, AcknowledgedResponse::fromXContent, listener, emptySet());
}
/**
* Delete a snapshot lifecycle definition.
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-delete-snapshot-lifecycle-policy.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse deleteSnapshotLifecyclePolicy(DeleteSnapshotLifecyclePolicyRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, IndexLifecycleRequestConverters::deleteSnapshotLifecyclePolicy,
options, AcknowledgedResponse::fromXContent, emptySet());
}
/**
* Asynchronously delete a snapshot lifecycle definition.
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-delete-snapshot-lifecycle-policy.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void deleteSnapshotLifecyclePolicyAsync(DeleteSnapshotLifecyclePolicyRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, IndexLifecycleRequestConverters::deleteSnapshotLifecyclePolicy,
options, AcknowledgedResponse::fromXContent, listener, emptySet());
}
/**
* Execute a snapshot lifecycle definition.
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-execute-snapshot-lifecycle-policy.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public ExecuteSnapshotLifecyclePolicyResponse executeSnapshotLifecyclePolicy(ExecuteSnapshotLifecyclePolicyRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, IndexLifecycleRequestConverters::executeSnapshotLifecyclePolicy,
options, ExecuteSnapshotLifecyclePolicyResponse::fromXContent, emptySet());
}
/**
* Asynchronously execute a snapshot lifecycle definition.
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-execute-snapshot-lifecycle-policy.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void executeSnapshotLifecyclePolicyAsync(ExecuteSnapshotLifecyclePolicyRequest request, RequestOptions options,
ActionListener<ExecuteSnapshotLifecyclePolicyResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, IndexLifecycleRequestConverters::executeSnapshotLifecyclePolicy,
options, ExecuteSnapshotLifecyclePolicyResponse::fromXContent, listener, emptySet());
}
}
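
For orientation, a hedged usage sketch of the new SLM entry points; the client variable, the policy id, and the id-taking request constructors are assumptions inferred from the converter code in the next file:

// Execute a policy immediately, then delete it. "client" is a configured
// RestHighLevelClient; "nightly-snapshots" is an illustrative policy id.
ExecuteSnapshotLifecyclePolicyRequest executeRequest =
    new ExecuteSnapshotLifecyclePolicyRequest("nightly-snapshots");
ExecuteSnapshotLifecyclePolicyResponse executeResponse =
    client.indexLifecycle().executeSnapshotLifecyclePolicy(executeRequest, RequestOptions.DEFAULT);

DeleteSnapshotLifecyclePolicyRequest deleteRequest =
    new DeleteSnapshotLifecyclePolicyRequest("nightly-snapshots");
AcknowledgedResponse deleteResponse =
    client.indexLifecycle().deleteSnapshotLifecyclePolicy(deleteRequest, RequestOptions.DEFAULT);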

View File

@ -32,6 +32,10 @@ import org.elasticsearch.client.indexlifecycle.RemoveIndexLifecyclePolicyRequest
import org.elasticsearch.client.indexlifecycle.RetryLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.StartILMRequest;
import org.elasticsearch.client.indexlifecycle.StopILMRequest;
import org.elasticsearch.client.snapshotlifecycle.DeleteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.snapshotlifecycle.ExecuteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.snapshotlifecycle.GetSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.snapshotlifecycle.PutSnapshotLifecyclePolicyRequest;
import org.elasticsearch.common.Strings;
import java.io.IOException;
@ -159,4 +163,56 @@ final class IndexLifecycleRequestConverters {
request.addParameters(params.asMap());
return request;
}
static Request getSnapshotLifecyclePolicy(GetSnapshotLifecyclePolicyRequest getSnapshotLifecyclePolicyRequest) {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_slm/policy")
.addCommaSeparatedPathParts(getSnapshotLifecyclePolicyRequest.getPolicyIds()).build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(getSnapshotLifecyclePolicyRequest.masterNodeTimeout());
params.withTimeout(getSnapshotLifecyclePolicyRequest.timeout());
request.addParameters(params.asMap());
return request;
}
static Request putSnapshotLifecyclePolicy(PutSnapshotLifecyclePolicyRequest putSnapshotLifecyclePolicyRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_slm/policy")
.addPathPartAsIs(putSnapshotLifecyclePolicyRequest.getPolicy().getId())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putSnapshotLifecyclePolicyRequest.masterNodeTimeout());
params.withTimeout(putSnapshotLifecyclePolicyRequest.timeout());
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(putSnapshotLifecyclePolicyRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request deleteSnapshotLifecyclePolicy(DeleteSnapshotLifecyclePolicyRequest deleteSnapshotLifecyclePolicyRequest) {
Request request = new Request(HttpDelete.METHOD_NAME,
new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_slm/policy")
.addPathPartAsIs(deleteSnapshotLifecyclePolicyRequest.getPolicyId())
.build());
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(deleteSnapshotLifecyclePolicyRequest.masterNodeTimeout());
params.withTimeout(deleteSnapshotLifecyclePolicyRequest.timeout());
request.addParameters(params.asMap());
return request;
}
static Request executeSnapshotLifecyclePolicy(ExecuteSnapshotLifecyclePolicyRequest executeSnapshotLifecyclePolicyRequest) {
Request request = new Request(HttpPut.METHOD_NAME,
new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_slm/policy")
.addPathPartAsIs(executeSnapshotLifecyclePolicyRequest.getPolicyId())
.addPathPartAsIs("_execute")
.build());
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(executeSnapshotLifecyclePolicyRequest.masterNodeTimeout());
params.withTimeout(executeSnapshotLifecyclePolicyRequest.timeout());
request.addParameters(params.asMap());
return request;
}
}
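
The builders above resolve to the raw SLM endpoints below; a low-level sketch for users of the plain RestClient, with an illustrative policy id taken only for example:

// Paths derived from the EndpointBuilder calls above.
Request getPolicy     = new Request("GET", "/_slm/policy/nightly-snapshots");
Request putPolicy     = new Request("PUT", "/_slm/policy/nightly-snapshots");
Request deletePolicy  = new Request("DELETE", "/_slm/policy/nightly-snapshots");
Request executePolicy = new Request("PUT", "/_slm/policy/nightly-snapshots/_execute");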

View File

@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.flush.FlushResponse;
@ -47,6 +46,8 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.core.ShardsAcknowledgedResponse;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;
import org.elasticsearch.client.indices.CloseIndexRequest;
import org.elasticsearch.client.indices.CloseIndexResponse;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.indices.FreezeIndexRequest;
@ -471,9 +472,9 @@ public final class IndicesClient {
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse close(CloseIndexRequest closeIndexRequest, RequestOptions options) throws IOException {
public CloseIndexResponse close(CloseIndexRequest closeIndexRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(closeIndexRequest, IndicesRequestConverters::closeIndex, options,
AcknowledgedResponse::fromXContent, emptySet());
CloseIndexResponse::fromXContent, emptySet());
}
/**
@ -484,9 +485,9 @@ public final class IndicesClient {
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void closeAsync(CloseIndexRequest closeIndexRequest, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
public void closeAsync(CloseIndexRequest closeIndexRequest, RequestOptions options, ActionListener<CloseIndexResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(closeIndexRequest, IndicesRequestConverters::closeIndex, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
CloseIndexResponse::fromXContent, listener, emptySet());
}
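
A hedged sketch of the changed close API: it now returns a CloseIndexResponse with per-index detail rather than a bare acknowledgement. The client and index name are illustrative:

CloseIndexRequest closeRequest = new CloseIndexRequest("my-index");
CloseIndexResponse closeResponse = client.indices().close(closeRequest, RequestOptions.DEFAULT);
boolean acknowledged = closeResponse.isAcknowledged(); // the acknowledged flag is still exposed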
@ -569,7 +570,7 @@ public final class IndicesClient {
/**
* Initiate a synced flush manually using the synced flush API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-synced-flush.html">
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-flush.html#synced-flush-api">
* Synced flush API on elastic.co</a>
* @param syncedFlushRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -583,7 +584,7 @@ public final class IndicesClient {
/**
* Asynchronously initiate a synced flush manually using the synced flush API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-synced-flush.html">
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-flush.html#synced-flush-api">
* Synced flush API on elastic.co</a>
* @param syncedFlushRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized

View File

@ -27,7 +27,6 @@ import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
@ -41,6 +40,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.CloseIndexRequest;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.FreezeIndexRequest;
import org.elasticsearch.client.indices.GetFieldMappingsRequest;

View File

@ -1894,7 +1894,8 @@ public final class MachineLearningClient {
* Creates a new Data Frame Analytics config
* <p>
* For additional info
* see <a href="https://www.TODO.com">PUT Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-dfanalytics.html">
* PUT Data Frame Analytics documentation</a>
*
* @param request The {@link PutDataFrameAnalyticsRequest} containing the
* {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
@ -1916,7 +1917,8 @@ public final class MachineLearningClient {
* Creates a new Data Frame Analytics config asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">PUT Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-dfanalytics.html">
* PUT Data Frame Analytics documentation</a>
*
* @param request The {@link PutDataFrameAnalyticsRequest} containing the
* {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
@ -1937,7 +1939,8 @@ public final class MachineLearningClient {
* Gets a single or multiple Data Frame Analytics configs
* <p>
* For additional info
* see <a href="https://www.TODO.com">GET Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics.html">
* GET Data Frame Analytics documentation</a>
*
* @param request The {@link GetDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -1957,7 +1960,8 @@ public final class MachineLearningClient {
* Gets a single or multiple Data Frame Analytics configs asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">GET Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics.html">
* GET Data Frame Analytics documentation</a>
*
* @param request The {@link GetDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -1977,7 +1981,8 @@ public final class MachineLearningClient {
* Gets the running statistics of a Data Frame Analytics
* <p>
* For additional info
* see <a href="https://www.TODO.com">GET Data Frame Analytics Stats documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics-stats.html">
* GET Data Frame Analytics Stats documentation</a>
*
* @param request The {@link GetDataFrameAnalyticsStatsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -1996,7 +2001,8 @@ public final class MachineLearningClient {
* Gets the running statistics of a Data Frame Analytics asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">GET Data Frame Analytics Stats documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics-stats.html">
* GET Data Frame Analytics Stats documentation</a>
*
* @param request The {@link GetDataFrameAnalyticsStatsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -2016,7 +2022,8 @@ public final class MachineLearningClient {
* Starts Data Frame Analytics
* <p>
* For additional info
* see <a href="https://www.TODO.com">Start Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/start-dfanalytics.html">
* Start Data Frame Analytics documentation</a>
*
* @param request The {@link StartDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -2036,7 +2043,8 @@ public final class MachineLearningClient {
* Starts Data Frame Analytics asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">Start Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/start-dfanalytics.html">
* Start Data Frame Analytics documentation</a>
*
* @param request The {@link StartDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -2056,7 +2064,8 @@ public final class MachineLearningClient {
* Stops Data Frame Analytics
* <p>
* For additional info
* see <a href="https://www.TODO.com">Stop Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-dfanalytics.html">
* Stop Data Frame Analytics documentation</a>
*
* @param request The {@link StopDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -2076,7 +2085,8 @@ public final class MachineLearningClient {
* Stops Data Frame Analytics asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">Stop Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-dfanalytics.html">
* Stop Data Frame Analytics documentation</a>
*
* @param request The {@link StopDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -2096,7 +2106,8 @@ public final class MachineLearningClient {
* Deletes the given Data Frame Analytics config
* <p>
* For additional info
* see <a href="https://www.TODO.com">DELETE Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-dfanalytics.html">
* DELETE Data Frame Analytics documentation</a>
*
* @param request The {@link DeleteDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -2116,7 +2127,8 @@ public final class MachineLearningClient {
* Deletes the given Data Frame Analytics config asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">DELETE Data Frame Analytics documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-dfanalytics.html">
* DELETE Data Frame Analytics documentation</a>
*
* @param request The {@link DeleteDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -2136,7 +2148,8 @@ public final class MachineLearningClient {
* Evaluates the given Data Frame
* <p>
* For additional info
* see <a href="https://www.TODO.com">Evaluate Data Frame documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/evaluate-dfanalytics.html">
* Evaluate Data Frame documentation</a>
*
* @param request The {@link EvaluateDataFrameRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -2156,7 +2169,8 @@ public final class MachineLearningClient {
* Evaluates the given Data Frame asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">Evaluate Data Frame documentation</a>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/evaluate-dfanalytics.html">
* Evaluate Data Frame documentation</a>
*
* @param request The {@link EvaluateDataFrameRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized

View File

@ -1010,8 +1010,9 @@ public class RestHighLevelClient implements Closeable {
/**
* Executes a search using the Search Scroll API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html">Search Scroll
* API on elastic.co</a>
* See <a
* href="https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#request-body-search-scroll">Search
* Scroll API on elastic.co</a>
* @param searchScrollRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
@ -1024,8 +1025,9 @@ public class RestHighLevelClient implements Closeable {
/**
* Executes a search using the Search Scroll API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html">Search Scroll
* API on elastic.co</a>
* See <a
* href="https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#request-body-search-scroll">Search
* Scroll API on elastic.co</a>
* @param searchScrollRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
@ -1037,8 +1039,9 @@ public class RestHighLevelClient implements Closeable {
/**
* Asynchronously executes a search using the Search Scroll API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html">Search Scroll
* API on elastic.co</a>
* See <a
* href="https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#request-body-search-scroll">Search
* Scroll API on elastic.co</a>
* @param searchScrollRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
@ -1052,8 +1055,9 @@ public class RestHighLevelClient implements Closeable {
/**
* Asynchronously executes a search using the Search Scroll API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html">Search Scroll
* API on elastic.co</a>
* See <a
* href="https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#request-body-search-scroll">Search
* Scroll API on elastic.co</a>
* @param searchScrollRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
@ -1066,7 +1070,8 @@ public class RestHighLevelClient implements Closeable {
/**
* Clears one or more scroll ids using the Clear Scroll API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html#_clear_scroll_api">
* See <a
* href="https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#_clear_scroll_api">
* Clear Scroll API on elastic.co</a>
* @param clearScrollRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -1079,7 +1084,8 @@ public class RestHighLevelClient implements Closeable {
/**
* Asynchronously clears one or more scroll ids using the Clear Scroll API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html#_clear_scroll_api">
* See <a
* href="https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#_clear_scroll_api">
* Clear Scroll API on elastic.co</a>
* @param clearScrollRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized

View File

@ -18,7 +18,6 @@
*/
package org.elasticsearch.client;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -40,7 +39,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
public class SyncedFlushResponse extends ActionResponse implements ToXContentObject {
public class SyncedFlushResponse implements ToXContentObject {
public static final String SHARDS_FIELD = "_shards";

View File

@ -19,7 +19,6 @@
package org.elasticsearch.client.core;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
@ -35,7 +34,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpect
/**
* A response to _count API request.
*/
public final class CountResponse extends ActionResponse {
public final class CountResponse {
static final ParseField COUNT = new ParseField("count");
static final ParseField TERMINATED_EARLY = new ParseField("terminated_early");

View File

@ -31,7 +31,10 @@ import java.util.Optional;
*/
public class DeleteDataFrameTransformRequest implements Validatable {
public static final String FORCE = "force";
private final String id;
private Boolean force;
public DeleteDataFrameTransformRequest(String id) {
this.id = id;
@ -41,6 +44,14 @@ public class DeleteDataFrameTransformRequest implements Validatable {
return id;
}
public Boolean getForce() {
return force;
}
public void setForce(boolean force) {
this.force = force;
}
@Override
public Optional<ValidationException> validate() {
if (id == null) {
@ -54,7 +65,7 @@ public class DeleteDataFrameTransformRequest implements Validatable {
@Override
public int hashCode() {
return Objects.hash(id);
return Objects.hash(id, force);
}
@Override
@ -67,6 +78,6 @@ public class DeleteDataFrameTransformRequest implements Validatable {
return false;
}
DeleteDataFrameTransformRequest other = (DeleteDataFrameTransformRequest) obj;
return Objects.equals(id, other.id);
return Objects.equals(id, other.id) && Objects.equals(force, other.force);
}
}
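
Putting the new request flags together, a hedged sketch; "client", the transform config, and the transform id are assumed, and the setDeferValidation setter appears in the PutDataFrameTransformRequest hunk further down:

// PUT with defer_validation, then DELETE with force.
PutDataFrameTransformRequest putRequest = new PutDataFrameTransformRequest(config);
putRequest.setDeferValidation(true); // skip deferrable checks until the transform starts
client.dataFrame().putDataFrameTransform(putRequest, RequestOptions.DEFAULT);

DeleteDataFrameTransformRequest deleteRequest = new DeleteDataFrameTransformRequest("my-transform");
deleteRequest.setForce(true); // remove the transform even if it is still running
client.dataFrame().deleteDataFrameTransform(deleteRequest, RequestOptions.DEFAULT);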

View File

@ -21,7 +21,7 @@ package org.elasticsearch.client.dataframe;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.client.dataframe.transforms.DataFrameTransformStateAndStats;
import org.elasticsearch.client.dataframe.transforms.DataFrameTransformStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -42,11 +42,11 @@ public class GetDataFrameTransformStatsResponse {
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<GetDataFrameTransformStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
"get_data_frame_transform_stats_response", true,
args -> new GetDataFrameTransformStatsResponse((List<DataFrameTransformStateAndStats>) args[0],
args -> new GetDataFrameTransformStatsResponse((List<DataFrameTransformStats>) args[0],
(List<TaskOperationFailure>) args[1], (List<ElasticsearchException>) args[2]));
static {
PARSER.declareObjectArray(constructorArg(), DataFrameTransformStateAndStats.PARSER::apply, TRANSFORMS);
PARSER.declareObjectArray(constructorArg(), DataFrameTransformStats.PARSER::apply, TRANSFORMS);
// Discard the count field which is the size of the transforms array
PARSER.declareInt((a, b) -> {}, COUNT);
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p),
@ -59,20 +59,20 @@ public class GetDataFrameTransformStatsResponse {
return GetDataFrameTransformStatsResponse.PARSER.apply(parser, null);
}
private final List<DataFrameTransformStateAndStats> transformsStateAndStats;
private final List<DataFrameTransformStats> transformsStats;
private final List<TaskOperationFailure> taskFailures;
private final List<ElasticsearchException> nodeFailures;
public GetDataFrameTransformStatsResponse(List<DataFrameTransformStateAndStats> transformsStateAndStats,
public GetDataFrameTransformStatsResponse(List<DataFrameTransformStats> transformsStats,
@Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
this.transformsStateAndStats = transformsStateAndStats;
this.transformsStats = transformsStats;
this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(taskFailures);
this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(nodeFailures);
}
public List<DataFrameTransformStateAndStats> getTransformsStateAndStats() {
return transformsStateAndStats;
public List<DataFrameTransformStats> getTransformsStats() {
return transformsStats;
}
public List<ElasticsearchException> getNodeFailures() {
@ -85,7 +85,7 @@ public class GetDataFrameTransformStatsResponse {
@Override
public int hashCode() {
return Objects.hash(transformsStateAndStats, nodeFailures, taskFailures);
return Objects.hash(transformsStats, nodeFailures, taskFailures);
}
@Override
@ -99,7 +99,7 @@ public class GetDataFrameTransformStatsResponse {
}
final GetDataFrameTransformStatsResponse that = (GetDataFrameTransformStatsResponse) other;
return Objects.equals(this.transformsStateAndStats, that.transformsStateAndStats)
return Objects.equals(this.transformsStats, that.transformsStats)
&& Objects.equals(this.nodeFailures, that.nodeFailures)
&& Objects.equals(this.taskFailures, that.taskFailures);
}
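
A short sketch of the renamed accessor; the client and transform id are illustrative, and the request class is assumed unchanged:

GetDataFrameTransformStatsResponse statsResponse = client.dataFrame()
    .getDataFrameTransformStats(new GetDataFrameTransformStatsRequest("my-transform"), RequestOptions.DEFAULT);
// getTransformsStats() replaces the old getTransformsStateAndStats().
List<DataFrameTransformStats> stats = statsResponse.getTransformsStats();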

View File

@ -31,7 +31,9 @@ import java.util.Optional;
public class PutDataFrameTransformRequest implements ToXContentObject, Validatable {
public static final String DEFER_VALIDATION = "defer_validation";
private final DataFrameTransformConfig config;
private Boolean deferValidation;
public PutDataFrameTransformRequest(DataFrameTransformConfig config) {
this.config = config;
@ -41,6 +43,19 @@ public class PutDataFrameTransformRequest implements ToXContentObject, Validatab
return config;
}
public Boolean getDeferValidation() {
return deferValidation;
}
/**
* Indicates if deferrable validations should be skipped until the transform starts
*
* @param deferValidation {@code true} will cause validations to be deferred
*/
public void setDeferValidation(boolean deferValidation) {
this.deferValidation = deferValidation;
}
@Override
public Optional<ValidationException> validate() {
ValidationException validationException = new ValidationException();

View File

@ -0,0 +1,99 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Holds state of the cursors:
*
* indexer_position: the position of the indexer querying the source
* bucket_position: the position used for identifying changes
*/
public class DataFrameIndexerPosition {
public static final ParseField INDEXER_POSITION = new ParseField("indexer_position");
public static final ParseField BUCKET_POSITION = new ParseField("bucket_position");
private final Map<String, Object> indexerPosition;
private final Map<String, Object> bucketPosition;
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<DataFrameIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(
"data_frame_indexer_position",
true,
args -> new DataFrameIndexerPosition((Map<String, Object>) args[0], (Map<String, Object>) args[1]));
static {
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, INDEXER_POSITION, ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, BUCKET_POSITION, ValueType.OBJECT);
}
public DataFrameIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
this.indexerPosition = indexerPosition == null ? null : Collections.unmodifiableMap(indexerPosition);
this.bucketPosition = bucketPosition == null ? null : Collections.unmodifiableMap(bucketPosition);
}
public Map<String, Object> getIndexerPosition() {
return indexerPosition;
}
public Map<String, Object> getBucketsPosition() {
return bucketPosition;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
DataFrameIndexerPosition that = (DataFrameIndexerPosition) other;
return Objects.equals(this.indexerPosition, that.indexerPosition) &&
Objects.equals(this.bucketPosition, that.bucketPosition);
}
@Override
public int hashCode() {
return Objects.hash(indexerPosition, bucketPosition);
}
public static DataFrameIndexerPosition fromXContent(XContentParser parser) {
try {
return PARSER.parse(parser, null);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
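
A hedged parsing sketch for the new class, from a hand-built JSON document (inside a method that may throw IOException):

String json = "{\"indexer_position\":{\"user\":\"u-42\"},\"bucket_position\":{\"user\":\"u-41\"}}";
try (XContentParser parser = JsonXContent.jsonXContent.createParser(
        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    DataFrameIndexerPosition position = DataFrameIndexerPosition.fromXContent(parser);
    Map<String, Object> indexerCursor = position.getIndexerPosition(); // cursor into the source
    Map<String, Object> changeCursor = position.getBucketsPosition();  // change-detection cursor
}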

View File

@ -19,48 +19,86 @@
package org.elasticsearch.client.dataframe.transforms;
import org.elasticsearch.client.core.IndexerState;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameTransformCheckpointStats {
public static final ParseField CHECKPOINT = new ParseField("checkpoint");
public static final ParseField INDEXER_STATE = new ParseField("indexer_state");
public static final ParseField POSITION = new ParseField("position");
public static final ParseField CHECKPOINT_PROGRESS = new ParseField("checkpoint_progress");
public static final ParseField TIMESTAMP_MILLIS = new ParseField("timestamp_millis");
public static final ParseField TIME_UPPER_BOUND_MILLIS = new ParseField("time_upper_bound_millis");
public static DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, 0L);
public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, null, 0L, 0L);
private final long checkpoint;
private final IndexerState indexerState;
private final DataFrameIndexerPosition position;
private final DataFrameTransformProgress checkpointProgress;
private final long timestampMillis;
private final long timeUpperBoundMillis;
public static final ConstructingObjectParser<DataFrameTransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
"data_frame_transform_checkpoint_stats", true, args -> {
long timestamp = args[0] == null ? 0L : (Long) args[0];
long timeUpperBound = args[1] == null ? 0L : (Long) args[1];
long checkpoint = args[0] == null ? 0L : (Long) args[0];
IndexerState indexerState = (IndexerState) args[1];
DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[2];
DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[3];
long timestamp = args[4] == null ? 0L : (Long) args[4];
long timeUpperBound = args[5] == null ? 0L : (Long) args[5];
return new DataFrameTransformCheckpointStats(timestamp, timeUpperBound);
});
return new DataFrameTransformCheckpointStats(checkpoint, indexerState, position, checkpointProgress, timestamp, timeUpperBound);
});
static {
LENIENT_PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), TIMESTAMP_MILLIS);
LENIENT_PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), TIME_UPPER_BOUND_MILLIS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), CHECKPOINT);
LENIENT_PARSER.declareField(optionalConstructorArg(), p -> IndexerState.fromString(p.text()), INDEXER_STATE,
ObjectParser.ValueType.STRING);
LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameIndexerPosition.PARSER, POSITION);
LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameTransformProgress.PARSER, CHECKPOINT_PROGRESS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TIMESTAMP_MILLIS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TIME_UPPER_BOUND_MILLIS);
}
public static DataFrameTransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
return LENIENT_PARSER.parse(parser, null);
}
public DataFrameTransformCheckpointStats(final long timestampMillis, final long timeUpperBoundMillis) {
public DataFrameTransformCheckpointStats(final long checkpoint, final IndexerState indexerState,
final DataFrameIndexerPosition position, final DataFrameTransformProgress checkpointProgress,
final long timestampMillis, final long timeUpperBoundMillis) {
this.checkpoint = checkpoint;
this.indexerState = indexerState;
this.position = position;
this.checkpointProgress = checkpointProgress;
this.timestampMillis = timestampMillis;
this.timeUpperBoundMillis = timeUpperBoundMillis;
}
public DataFrameTransformCheckpointStats(StreamInput in) throws IOException {
this.timestampMillis = in.readLong();
this.timeUpperBoundMillis = in.readLong();
public long getCheckpoint() {
return checkpoint;
}
public IndexerState getIndexerState() {
return indexerState;
}
public DataFrameIndexerPosition getPosition() {
return position;
}
public DataFrameTransformProgress getCheckpointProgress() {
return checkpointProgress;
}
public long getTimestampMillis() {
@ -73,7 +111,7 @@ public class DataFrameTransformCheckpointStats {
@Override
public int hashCode() {
return Objects.hash(timestampMillis, timeUpperBoundMillis);
return Objects.hash(checkpoint, indexerState, position, checkpointProgress, timestampMillis, timeUpperBoundMillis);
}
@Override
@ -88,6 +126,11 @@ public class DataFrameTransformCheckpointStats {
DataFrameTransformCheckpointStats that = (DataFrameTransformCheckpointStats) other;
return this.timestampMillis == that.timestampMillis && this.timeUpperBoundMillis == that.timeUpperBoundMillis;
return this.checkpoint == that.checkpoint
&& Objects.equals(this.indexerState, that.indexerState)
&& Objects.equals(this.position, that.position)
&& Objects.equals(this.checkpointProgress, that.checkpointProgress)
&& this.timestampMillis == that.timestampMillis
&& this.timeUpperBoundMillis == that.timeUpperBoundMillis;
}
}

View File

@ -27,15 +27,14 @@ import java.util.Objects;
public class DataFrameTransformCheckpointingInfo {
public static final ParseField CURRENT_CHECKPOINT = new ParseField("current");
public static final ParseField IN_PROGRESS_CHECKPOINT = new ParseField("in_progress");
public static final ParseField LAST_CHECKPOINT = new ParseField("last", "current");
public static final ParseField NEXT_CHECKPOINT = new ParseField("next", "in_progress");
public static final ParseField OPERATIONS_BEHIND = new ParseField("operations_behind");
private final DataFrameTransformCheckpointStats current;
private final DataFrameTransformCheckpointStats inProgress;
private final DataFrameTransformCheckpointStats last;
private final DataFrameTransformCheckpointStats next;
private final long operationsBehind;
private static final ConstructingObjectParser<DataFrameTransformCheckpointingInfo, Void> LENIENT_PARSER =
new ConstructingObjectParser<>(
"data_frame_transform_checkpointing_info", true, a -> {
@ -48,25 +47,25 @@ public class DataFrameTransformCheckpointingInfo {
static {
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), CURRENT_CHECKPOINT);
(p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), IN_PROGRESS_CHECKPOINT);
(p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
LENIENT_PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), OPERATIONS_BEHIND);
}
public DataFrameTransformCheckpointingInfo(DataFrameTransformCheckpointStats current, DataFrameTransformCheckpointStats inProgress,
public DataFrameTransformCheckpointingInfo(DataFrameTransformCheckpointStats last, DataFrameTransformCheckpointStats next,
long operationsBehind) {
this.current = Objects.requireNonNull(current);
this.inProgress = Objects.requireNonNull(inProgress);
this.last = Objects.requireNonNull(last);
this.next = Objects.requireNonNull(next);
this.operationsBehind = operationsBehind;
}
public DataFrameTransformCheckpointStats getCurrent() {
return current;
public DataFrameTransformCheckpointStats getLast() {
return last;
}
public DataFrameTransformCheckpointStats getInProgress() {
return inProgress;
public DataFrameTransformCheckpointStats getNext() {
return next;
}
public long getOperationsBehind() {
@ -79,7 +78,7 @@ public class DataFrameTransformCheckpointingInfo {
@Override
public int hashCode() {
return Objects.hash(current, inProgress, operationsBehind);
return Objects.hash(last, next, operationsBehind);
}
@Override
@ -94,8 +93,8 @@ public class DataFrameTransformCheckpointingInfo {
DataFrameTransformCheckpointingInfo that = (DataFrameTransformCheckpointingInfo) other;
return Objects.equals(this.current, that.current) &&
Objects.equals(this.inProgress, that.inProgress) &&
return Objects.equals(this.last, that.last) &&
Objects.equals(this.next, that.next) &&
this.operationsBehind == that.operationsBehind;
}
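
The rename in practice, assuming an existing DataFrameTransformCheckpointingInfo instance named "info":

DataFrameTransformCheckpointStats last = info.getLast(); // was getCurrent()
DataFrameTransformCheckpointStats next = info.getNext(); // was getInProgress()
long operationsBehind = info.getOperationsBehind();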

View File

@ -25,6 +25,7 @@ import org.elasticsearch.client.dataframe.transforms.util.TimeUtil;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -44,6 +45,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
public static final ParseField ID = new ParseField("id");
public static final ParseField SOURCE = new ParseField("source");
public static final ParseField DEST = new ParseField("dest");
public static final ParseField FREQUENCY = new ParseField("frequency");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField SYNC = new ParseField("sync");
public static final ParseField VERSION = new ParseField("version");
@ -54,6 +56,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
private final String id;
private final SourceConfig source;
private final DestConfig dest;
private final TimeValue frequency;
private final SyncConfig syncConfig;
private final PivotConfig pivotConfig;
private final String description;
@ -66,14 +69,16 @@ public class DataFrameTransformConfig implements ToXContentObject {
String id = (String) args[0];
SourceConfig source = (SourceConfig) args[1];
DestConfig dest = (DestConfig) args[2];
SyncConfig syncConfig = (SyncConfig) args[3];
PivotConfig pivotConfig = (PivotConfig) args[4];
String description = (String)args[5];
Instant createTime = (Instant)args[6];
String transformVersion = (String)args[7];
TimeValue frequency = (TimeValue) args[3];
SyncConfig syncConfig = (SyncConfig) args[4];
PivotConfig pivotConfig = (PivotConfig) args[5];
String description = (String)args[6];
Instant createTime = (Instant)args[7];
String transformVersion = (String)args[8];
return new DataFrameTransformConfig(id,
source,
dest,
frequency,
syncConfig,
pivotConfig,
description,
@ -85,6 +90,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
PARSER.declareString(constructorArg(), ID);
PARSER.declareObject(constructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), SOURCE);
PARSER.declareObject(constructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DEST);
PARSER.declareField(optionalConstructorArg(), p -> TimeValue.parseTimeValue(p.text(), FREQUENCY.getPreferredName()),
FREQUENCY, ObjectParser.ValueType.STRING);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), SYNC);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p), PIVOT_TRANSFORM);
PARSER.declareString(optionalConstructorArg(), DESCRIPTION);
@ -118,12 +125,13 @@ public class DataFrameTransformConfig implements ToXContentObject {
* @return A DataFrameTransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
*/
public static DataFrameTransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
return new DataFrameTransformConfig(null, source, null, null, pivotConfig, null, null, null);
return new DataFrameTransformConfig(null, source, null, null, null, pivotConfig, null, null, null);
}
DataFrameTransformConfig(final String id,
final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
final SyncConfig syncConfig,
final PivotConfig pivotConfig,
final String description,
@ -132,6 +140,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
this.id = id;
this.source = source;
this.dest = dest;
this.frequency = frequency;
this.syncConfig = syncConfig;
this.pivotConfig = pivotConfig;
this.description = description;
@ -151,6 +160,10 @@ public class DataFrameTransformConfig implements ToXContentObject {
return dest;
}
public TimeValue getFrequency() {
return frequency;
}
public SyncConfig getSyncConfig() {
return syncConfig;
}
@ -184,6 +197,9 @@ public class DataFrameTransformConfig implements ToXContentObject {
if (dest != null) {
builder.field(DEST.getPreferredName(), dest);
}
if (frequency != null) {
builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep());
}
if (syncConfig != null) {
builder.startObject(SYNC.getPreferredName());
builder.field(syncConfig.getName(), syncConfig);
@ -220,6 +236,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
return Objects.equals(this.id, that.id)
&& Objects.equals(this.source, that.source)
&& Objects.equals(this.dest, that.dest)
&& Objects.equals(this.frequency, that.frequency)
&& Objects.equals(this.description, that.description)
&& Objects.equals(this.syncConfig, that.syncConfig)
&& Objects.equals(this.transformVersion, that.transformVersion)
@ -229,7 +246,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
@Override
public int hashCode() {
return Objects.hash(id, source, dest, syncConfig, pivotConfig, description);
return Objects.hash(id, source, dest, frequency, syncConfig, pivotConfig, description);
}
@Override
@ -246,6 +263,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
private String id;
private SourceConfig source;
private DestConfig dest;
private TimeValue frequency;
private SyncConfig syncConfig;
private PivotConfig pivotConfig;
private String description;
@ -265,6 +283,11 @@ public class DataFrameTransformConfig implements ToXContentObject {
return this;
}
public Builder setFrequency(TimeValue frequency) {
this.frequency = frequency;
return this;
}
public Builder setSyncConfig(SyncConfig syncConfig) {
this.syncConfig = syncConfig;
return this;
@ -281,7 +304,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
}
public DataFrameTransformConfig build() {
return new DataFrameTransformConfig(id, source, dest, syncConfig, pivotConfig, description, null, null);
return new DataFrameTransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, null, null);
}
}
}
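
A minimal usage sketch of the new frequency setting, not part of this diff: the builder() factory and setId(...) setter are assumed to exist on the class (only setFrequency(...) appears in this hunk), and the transform id and one-minute interval are illustrative.

import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig;
import org.elasticsearch.common.unit.TimeValue;

public class FrequencyExample {
    public static DataFrameTransformConfig buildConfig() {
        // builder() and setId(...) are assumed; "my-transform" and 1m are illustrative values.
        return DataFrameTransformConfig.builder()
                .setId("my-transform")
                .setFrequency(TimeValue.timeValueMinutes(1)) // serialized as "1m" via getStringRep()
                .build();
    }
}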


@ -1,155 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe.transforms;
import org.elasticsearch.client.core.IndexerState;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameTransformState {
private static final ParseField INDEXER_STATE = new ParseField("indexer_state");
private static final ParseField TASK_STATE = new ParseField("task_state");
private static final ParseField CURRENT_POSITION = new ParseField("current_position");
private static final ParseField CHECKPOINT = new ParseField("checkpoint");
private static final ParseField REASON = new ParseField("reason");
private static final ParseField PROGRESS = new ParseField("progress");
private static final ParseField NODE = new ParseField("node");
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<DataFrameTransformState, Void> PARSER =
new ConstructingObjectParser<>("data_frame_transform_state", true,
args -> new DataFrameTransformState((DataFrameTransformTaskState) args[0],
(IndexerState) args[1],
(Map<String, Object>) args[2],
(long) args[3],
(String) args[4],
(DataFrameTransformProgress) args[5],
(NodeAttributes) args[6]));
static {
PARSER.declareField(constructorArg(), p -> DataFrameTransformTaskState.fromString(p.text()), TASK_STATE, ValueType.STRING);
PARSER.declareField(constructorArg(), p -> IndexerState.fromString(p.text()), INDEXER_STATE, ValueType.STRING);
PARSER.declareField(optionalConstructorArg(), (p, c) -> p.mapOrdered(), CURRENT_POSITION, ValueType.OBJECT);
PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CHECKPOINT);
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REASON);
PARSER.declareField(optionalConstructorArg(), DataFrameTransformProgress::fromXContent, PROGRESS, ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE, ValueType.OBJECT);
}
public static DataFrameTransformState fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private final DataFrameTransformTaskState taskState;
private final IndexerState indexerState;
private final long checkpoint;
private final Map<String, Object> currentPosition;
private final String reason;
private final DataFrameTransformProgress progress;
private final NodeAttributes node;
public DataFrameTransformState(DataFrameTransformTaskState taskState,
IndexerState indexerState,
@Nullable Map<String, Object> position,
long checkpoint,
@Nullable String reason,
@Nullable DataFrameTransformProgress progress,
@Nullable NodeAttributes node) {
this.taskState = taskState;
this.indexerState = indexerState;
this.currentPosition = position == null ? null : Collections.unmodifiableMap(new LinkedHashMap<>(position));
this.checkpoint = checkpoint;
this.reason = reason;
this.progress = progress;
this.node = node;
}
public IndexerState getIndexerState() {
return indexerState;
}
public DataFrameTransformTaskState getTaskState() {
return taskState;
}
@Nullable
public Map<String, Object> getPosition() {
return currentPosition;
}
public long getCheckpoint() {
return checkpoint;
}
@Nullable
public String getReason() {
return reason;
}
@Nullable
public DataFrameTransformProgress getProgress() {
return progress;
}
@Nullable
public NodeAttributes getNode() {
return node;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
DataFrameTransformState that = (DataFrameTransformState) other;
return Objects.equals(this.taskState, that.taskState) &&
Objects.equals(this.indexerState, that.indexerState) &&
Objects.equals(this.currentPosition, that.currentPosition) &&
Objects.equals(this.progress, that.progress) &&
this.checkpoint == that.checkpoint &&
Objects.equals(this.node, that.node) &&
Objects.equals(this.reason, that.reason);
}
@Override
public int hashCode() {
return Objects.hash(taskState, indexerState, currentPosition, checkpoint, reason, progress, node);
}
}


@ -1,104 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
public class DataFrameTransformStateAndStats {
public static final ParseField ID = new ParseField("id");
public static final ParseField STATE_FIELD = new ParseField("state");
public static final ParseField STATS_FIELD = new ParseField("stats");
public static final ParseField CHECKPOINTING_INFO_FIELD = new ParseField("checkpointing");
public static final ConstructingObjectParser<DataFrameTransformStateAndStats, Void> PARSER = new ConstructingObjectParser<>(
"data_frame_transform_state_and_stats", true,
a -> new DataFrameTransformStateAndStats((String) a[0], (DataFrameTransformState) a[1], (DataFrameIndexerTransformStats) a[2],
(DataFrameTransformCheckpointingInfo) a[3]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataFrameTransformState.PARSER::apply, STATE_FIELD);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameIndexerTransformStats.fromXContent(p),
STATS_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> DataFrameTransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD);
}
public static DataFrameTransformStateAndStats fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private final String id;
private final DataFrameTransformState transformState;
private final DataFrameIndexerTransformStats transformStats;
private final DataFrameTransformCheckpointingInfo checkpointingInfo;
public DataFrameTransformStateAndStats(String id, DataFrameTransformState state, DataFrameIndexerTransformStats stats,
DataFrameTransformCheckpointingInfo checkpointingInfo) {
this.id = id;
this.transformState = state;
this.transformStats = stats;
this.checkpointingInfo = checkpointingInfo;
}
public String getId() {
return id;
}
public DataFrameIndexerTransformStats getTransformStats() {
return transformStats;
}
public DataFrameTransformState getTransformState() {
return transformState;
}
public DataFrameTransformCheckpointingInfo getCheckpointingInfo() {
return checkpointingInfo;
}
@Override
public int hashCode() {
return Objects.hash(id, transformState, transformStats, checkpointingInfo);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
DataFrameTransformStateAndStats that = (DataFrameTransformStateAndStats) other;
return Objects.equals(this.id, that.id) && Objects.equals(this.transformState, that.transformState)
&& Objects.equals(this.transformStats, that.transformStats)
&& Objects.equals(this.checkpointingInfo, that.checkpointingInfo);
}
}


@ -0,0 +1,128 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameTransformStats {
public static final ParseField ID = new ParseField("id");
public static final ParseField TASK_STATE_FIELD = new ParseField("task_state");
public static final ParseField REASON_FIELD = new ParseField("reason");
public static final ParseField NODE_FIELD = new ParseField("node");
public static final ParseField STATS_FIELD = new ParseField("stats");
public static final ParseField CHECKPOINTING_INFO_FIELD = new ParseField("checkpointing");
public static final ConstructingObjectParser<DataFrameTransformStats, Void> PARSER = new ConstructingObjectParser<>(
"data_frame_transform_state_and_stats_info", true,
a -> new DataFrameTransformStats((String) a[0], (DataFrameTransformTaskState) a[1], (String) a[2],
(NodeAttributes) a[3], (DataFrameIndexerTransformStats) a[4], (DataFrameTransformCheckpointingInfo) a[5]));
static {
PARSER.declareString(constructorArg(), ID);
PARSER.declareField(optionalConstructorArg(), p -> DataFrameTransformTaskState.fromString(p.text()), TASK_STATE_FIELD,
ObjectParser.ValueType.STRING);
PARSER.declareString(optionalConstructorArg(), REASON_FIELD);
PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE_FIELD, ObjectParser.ValueType.OBJECT);
PARSER.declareObject(constructorArg(), (p, c) -> DataFrameIndexerTransformStats.fromXContent(p), STATS_FIELD);
PARSER.declareObject(optionalConstructorArg(),
(p, c) -> DataFrameTransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD);
}
public static DataFrameTransformStats fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private final String id;
private final String reason;
private final DataFrameTransformTaskState taskState;
private final NodeAttributes node;
private final DataFrameIndexerTransformStats indexerStats;
private final DataFrameTransformCheckpointingInfo checkpointingInfo;
public DataFrameTransformStats(String id, DataFrameTransformTaskState taskState, String reason, NodeAttributes node,
DataFrameIndexerTransformStats stats,
DataFrameTransformCheckpointingInfo checkpointingInfo) {
this.id = id;
this.taskState = taskState;
this.reason = reason;
this.node = node;
this.indexerStats = stats;
this.checkpointingInfo = checkpointingInfo;
}
public String getId() {
return id;
}
public DataFrameTransformTaskState getTaskState() {
return taskState;
}
public String getReason() {
return reason;
}
public NodeAttributes getNode() {
return node;
}
public DataFrameIndexerTransformStats getIndexerStats() {
return indexerStats;
}
public DataFrameTransformCheckpointingInfo getCheckpointingInfo() {
return checkpointingInfo;
}
@Override
public int hashCode() {
return Objects.hash(id, taskState, reason, node, indexerStats, checkpointingInfo);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
DataFrameTransformStats that = (DataFrameTransformStats) other;
return Objects.equals(this.id, that.id)
&& Objects.equals(this.taskState, that.taskState)
&& Objects.equals(this.reason, that.reason)
&& Objects.equals(this.node, that.node)
&& Objects.equals(this.indexerStats, that.indexerStats)
&& Objects.equals(this.checkpointingInfo, that.checkpointingInfo);
}
}


@ -23,8 +23,6 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -313,17 +311,6 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte
public float getBoost() {
return boost;
}
void readFrom(StreamInput in) throws IOException {
this.term = in.readString();
this.boost = in.readFloat();
}
void writeTo(StreamOutput out) throws IOException {
out.writeString(term);
out.writeFloat(boost);
}
}
@Override


@ -23,6 +23,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -52,6 +53,7 @@ public class IndexLifecycleExplainResponse implements ToXContentObject {
private static final ParseField STEP_TIME_FIELD = new ParseField("step_time");
private static final ParseField STEP_INFO_FIELD = new ParseField("step_info");
private static final ParseField PHASE_EXECUTION_INFO = new ParseField("phase_execution");
private static final ParseField AGE_FIELD = new ParseField("age");
public static final ConstructingObjectParser<IndexLifecycleExplainResponse, Void> PARSER = new ConstructingObjectParser<>(
"index_lifecycle_explain_response", true,
@ -205,6 +207,14 @@ public class IndexLifecycleExplainResponse implements ToXContentObject {
return phaseExecutionInfo;
}
public TimeValue getAge() {
if (lifecycleDate == null) {
return TimeValue.MINUS_ONE;
} else {
return TimeValue.timeValueMillis(System.currentTimeMillis() - lifecycleDate);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
@ -214,6 +224,7 @@ public class IndexLifecycleExplainResponse implements ToXContentObject {
builder.field(POLICY_NAME_FIELD.getPreferredName(), policyName);
if (lifecycleDate != null) {
builder.timeField(LIFECYCLE_DATE_MILLIS_FIELD.getPreferredName(), LIFECYCLE_DATE_FIELD.getPreferredName(), lifecycleDate);
builder.field(AGE_FIELD.getPreferredName(), getAge().toHumanReadableString(2));
}
if (phase != null) {
builder.field(PHASE_FIELD.getPreferredName(), phase);
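
A worked sketch of the age computation above: age is wall-clock time since the index entered its lifecycle policy (getAge() returns TimeValue.MINUS_ONE when no lifecycle date is set). The 90-second figure is an assumption for illustration.

import org.elasticsearch.common.unit.TimeValue;

public class AgeExample {
    public static String renderAge() {
        // Pretend the index entered its lifecycle policy 90 seconds ago.
        long lifecycleDate = System.currentTimeMillis() - 90_000L;
        TimeValue age = TimeValue.timeValueMillis(System.currentTimeMillis() - lifecycleDate);
        return age.toHumanReadableString(2); // "1.5m", the value written into the "age" field
    }
}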


@ -0,0 +1,113 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.indices;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.TimedRequest;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import java.util.Optional;
/**
* A request to close an index.
*/
public class CloseIndexRequest extends TimedRequest implements Validatable {
private String[] indices;
private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen();
private ActiveShardCount waitForActiveShards = ActiveShardCount.DEFAULT;
/**
* Creates a new close index request
*
* @param indices the indices to close
*/
public CloseIndexRequest(String... indices) {
this.indices = indices;
}
/**
* Returns the indices to close
*/
public String[] indices() {
return indices;
}
/**
* Specifies which types of requested indices to ignore (for example, indices that don't
* exist) and how to expand wildcard expressions.
*
* @return the current behaviour when it comes to index names and wildcard indices expressions
*/
public IndicesOptions indicesOptions() {
return indicesOptions;
}
/**
* Specifies which types of requested indices to ignore (for example, indices that don't
* exist) and how to expand wildcard expressions.
*
* @param indicesOptions the desired behaviour regarding indices to ignore and wildcard indices expressions
*/
public CloseIndexRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
/**
* Returns the wait for active shard count or null if the default should be used
*/
public ActiveShardCount waitForActiveShards() {
return waitForActiveShards;
}
/**
* Sets the number of shard copies that should be active for index closing to return.
* Defaults to {@link ActiveShardCount#DEFAULT}, which will wait for one shard copy
* (the primary) to become active. Set this value to {@link ActiveShardCount#ALL} to
* wait for all shards (primary and all replicas) to be active before returning.
* Otherwise, use {@link ActiveShardCount#from(int)} to set this value to any
* non-negative integer, up to the number of copies per shard (number of replicas + 1),
* to wait for the desired amount of shard copies to become active before returning.
* Index closing will only wait up until the timeout value for the number of shard copies
* to be active before returning. Check {@link CloseIndexResponse#isShardsAcknowledged()} to
* determine if the requisite shard copies were all started before returning or timing out.
*
* @param waitForActiveShards number of active shard copies to wait on
*/
public CloseIndexRequest waitForActiveShards(ActiveShardCount waitForActiveShards) {
this.waitForActiveShards = waitForActiveShards;
return this;
}
@Override
public Optional<ValidationException> validate() {
if (indices == null || indices.length == 0) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("index is missing");
return Optional.of(validationException);
} else {
return Optional.empty();
}
}
}
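
A short usage sketch, not part of this diff; the index pattern is hypothetical:

import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.indices.CloseIndexRequest;

public class CloseIndexExample {
    public static CloseIndexRequest buildRequest() {
        CloseIndexRequest request = new CloseIndexRequest("logs-2019.07.*"); // hypothetical pattern
        request.indicesOptions(IndicesOptions.lenientExpandOpen()); // tolerate missing indices
        request.waitForActiveShards(ActiveShardCount.ALL);          // wait for primaries and replicas
        return request;
    }
}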


@ -0,0 +1,213 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.indices;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.util.List;
import java.util.Objects;
import static java.util.Collections.emptyList;
import static java.util.Collections.unmodifiableList;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.ObjectParser.ValueType;
public class CloseIndexResponse extends ShardsAcknowledgedResponse {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<CloseIndexResponse, Void> PARSER = new ConstructingObjectParser<>("close_index_response",
true, args -> {
boolean acknowledged = (boolean) args[0];
boolean shardsAcknowledged = args[1] != null ? (boolean) args[1] : acknowledged;
List<CloseIndexResponse.IndexResult> indices = args[2] != null ? (List<CloseIndexResponse.IndexResult>) args[2] : emptyList();
return new CloseIndexResponse(acknowledged, shardsAcknowledged, indices);
});
static {
declareAcknowledgedField(PARSER);
PARSER.declareField(optionalConstructorArg(), (parser, context) -> parser.booleanValue(), SHARDS_ACKNOWLEDGED, ValueType.BOOLEAN);
PARSER.declareNamedObjects(optionalConstructorArg(), (p, c, name) -> IndexResult.fromXContent(p, name), new ParseField("indices"));
}
private final List<CloseIndexResponse.IndexResult> indices;
public CloseIndexResponse(final boolean acknowledged, final boolean shardsAcknowledged, final List<IndexResult> indices) {
super(acknowledged, shardsAcknowledged);
this.indices = unmodifiableList(Objects.requireNonNull(indices));
}
public List<IndexResult> getIndices() {
return indices;
}
public static CloseIndexResponse fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
public static class IndexResult {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<IndexResult, String> PARSER = new ConstructingObjectParser<>("index_result", true,
(args, index) -> {
Exception exception = (Exception) args[1];
if (exception != null) {
assert (boolean) args[0] == false;
return new IndexResult(index, exception);
}
ShardResult[] shardResults = args[2] != null ? ((List<ShardResult>) args[2]).toArray(new ShardResult[0]) : null;
if (shardResults != null) {
assert (boolean) args[0] == false;
return new IndexResult(index, shardResults);
}
assert (boolean) args[0];
return new IndexResult(index);
});
static {
PARSER.declareBoolean(optionalConstructorArg(), new ParseField("closed"));
PARSER.declareObject(optionalConstructorArg(), (p, c) -> {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, p.currentToken(), p::getTokenLocation);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p::getTokenLocation);
Exception e = ElasticsearchException.failureFromXContent(p);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p::getTokenLocation);
return e;
}, new ParseField("exception"));
PARSER.declareNamedObjects(optionalConstructorArg(),
(p, c, id) -> ShardResult.fromXContent(p, id), new ParseField("failedShards"));
}
private final String index;
private final @Nullable Exception exception;
private final @Nullable ShardResult[] shards;
IndexResult(final String index) {
this(index, null, null);
}
IndexResult(final String index, final Exception failure) {
this(index, Objects.requireNonNull(failure), null);
}
IndexResult(final String index, final ShardResult[] shards) {
this(index, null, Objects.requireNonNull(shards));
}
private IndexResult(final String index, @Nullable final Exception exception, @Nullable final ShardResult[] shards) {
this.index = Objects.requireNonNull(index);
this.exception = exception;
this.shards = shards;
}
public String getIndex() {
return index;
}
public @Nullable Exception getException() {
return exception;
}
public @Nullable ShardResult[] getShards() {
return shards;
}
public boolean hasFailures() {
if (exception != null) {
return true;
}
if (shards != null) {
for (ShardResult shard : shards) {
if (shard.hasFailures()) {
return true;
}
}
}
return false;
}
static IndexResult fromXContent(final XContentParser parser, final String name) {
return PARSER.apply(parser, name);
}
}
public static class ShardResult {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<ShardResult, String> PARSER = new ConstructingObjectParser<>("shard_result", true,
(arg, id) -> {
Failure[] failures = arg[0] != null ? ((List<Failure>) arg[0]).toArray(new Failure[0]) : new Failure[0];
return new ShardResult(Integer.parseInt(id), failures);
});
static {
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> Failure.PARSER.apply(p, null), new ParseField("failures"));
}
private final int id;
private final Failure[] failures;
ShardResult(final int id, final Failure[] failures) {
this.id = id;
this.failures = failures;
}
public boolean hasFailures() {
return failures != null && failures.length > 0;
}
public int getId() {
return id;
}
public Failure[] getFailures() {
return failures;
}
static ShardResult fromXContent(final XContentParser parser, final String id) {
return PARSER.apply(parser, id);
}
public static class Failure extends DefaultShardOperationFailedException {
static final ConstructingObjectParser<Failure, Void> PARSER = new ConstructingObjectParser<>("failure", true,
arg -> new Failure((String) arg[0], (int) arg[1], (Throwable) arg[2], (String) arg[3]));
static {
declareFields(PARSER);
PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("node"));
}
private @Nullable String nodeId;
Failure(final String index, final int shardId, final Throwable reason, final String nodeId) {
super(index, shardId, reason);
this.nodeId = nodeId;
}
public String getNodeId() {
return nodeId;
}
}
}
}
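
A sketch of walking the per-index results; the response is assumed to come from a high-level client close call or from CloseIndexResponse.fromXContent(...):

import org.elasticsearch.client.indices.CloseIndexResponse;

public class CloseIndexResultsExample {
    public static void reportFailures(CloseIndexResponse response) {
        for (CloseIndexResponse.IndexResult result : response.getIndices()) {
            if (result.hasFailures()) {
                // getException() is null when the failures are shard-level; see getShards().
                System.out.println(result.getIndex() + " failed to close: " + result.getException());
            }
        }
    }
}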


@ -70,7 +70,7 @@ public final class FreezeIndexRequest extends TimedRequest {
}
/**
* Returns the wait for active shard cound or null if the default should be used
* Returns the wait for active shard count or null if the default should be used
*/
public ActiveShardCount getWaitForActiveShards() {
return waitForActiveShards;


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -32,7 +31,7 @@ import java.util.Objects;
/**
* Abstract class that provides a list of results and their count.
*/
public abstract class AbstractResultResponse<T extends ToXContent> extends ActionResponse implements ToXContentObject {
public abstract class AbstractResultResponse<T extends ToXContent> implements ToXContentObject {
public static final ParseField COUNT = new ParseField("count");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Response indicating if the Job(s) closed or not
*/
public class CloseJobResponse extends ActionResponse implements ToXContentObject {
public class CloseJobResponse implements ToXContentObject {
private static final ParseField CLOSED = new ParseField("closed");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* A response acknowledging the deletion of expired data
*/
public class DeleteExpiredDataResponse extends ActionResponse implements ToXContentObject {
public class DeleteExpiredDataResponse implements ToXContentObject {
private static final ParseField DELETED = new ParseField("deleted");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -35,7 +34,7 @@ import java.util.Objects;
* Response object that contains the acknowledgement or the task id
* depending on whether the delete job action was requested to wait for completion.
*/
public class DeleteJobResponse extends ActionResponse implements ToXContentObject {
public class DeleteJobResponse implements ToXContentObject {
private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
private static final ParseField TASK = new ParseField("task");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* Response object containing flush acknowledgement and additional data
*/
public class FlushJobResponse extends ActionResponse implements ToXContentObject {
public class FlushJobResponse implements ToXContentObject {
public static final ParseField FLUSHED = new ParseField("flushed");
public static final ParseField LAST_FINALIZED_BUCKET_END = new ParseField("last_finalized_bucket_end");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Forecast response object
*/
public class ForecastJobResponse extends ActionResponse implements ToXContentObject {
public class ForecastJobResponse implements ToXContentObject {
public static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
public static final ParseField FORECAST_ID = new ParseField("forecast_id");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Response indicating if the Machine Learning Job is now opened or not
*/
public class OpenJobResponse extends ActionResponse implements ToXContentObject {
public class OpenJobResponse implements ToXContentObject {
private static final ParseField OPENED = new ParseField("opened");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* Response to adding ScheduledEvent(s) to a Machine Learning calendar
*/
public class PostCalendarEventResponse extends ActionResponse implements ToXContentObject {
public class PostCalendarEventResponse implements ToXContentObject {
private final List<ScheduledEvent> scheduledEvents;
public static final ParseField EVENTS = new ParseField("events");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.ml.job.process.DataCounts;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Response object when posting data to a Machine Learning Job
*/
public class PostDataResponse extends ActionResponse implements ToXContentObject {
public class PostDataResponse implements ToXContentObject {
private DataCounts dataCounts;


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
@ -41,7 +40,7 @@ import java.util.stream.Collectors;
/**
* Response containing a datafeed preview in JSON format
*/
public class PreviewDatafeedResponse extends ActionResponse implements ToXContentObject {
public class PreviewDatafeedResponse implements ToXContentObject {
private BytesReference preview;


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -27,13 +26,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
* A response containing the reverted model snapshot
*/
public class RevertModelSnapshotResponse extends ActionResponse implements ToXContentObject {
public class RevertModelSnapshotResponse implements ToXContentObject {
private static final ParseField MODEL = new ParseField("model");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Response indicating if the Machine Learning Datafeed is now started or not
*/
public class StartDatafeedResponse extends ActionResponse implements ToXContentObject {
public class StartDatafeedResponse implements ToXContentObject {
private static final ParseField STARTED = new ParseField("started");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Response indicating if the Machine Learning Datafeed is now stopped or not
*/
public class StopDatafeedResponse extends ActionResponse implements ToXContentObject {
public class StopDatafeedResponse implements ToXContentObject {
private static final ParseField STOPPED = new ParseField("stopped");


@ -18,7 +18,6 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* A response acknowledging the update of information for an existing model snapshot for a given job
*/
public class UpdateModelSnapshotResponse extends ActionResponse implements ToXContentObject {
public class UpdateModelSnapshotResponse implements ToXContentObject {
private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
private static final ParseField MODEL = new ParseField("model");


@ -41,9 +41,12 @@ public class DatafeedStats implements ToXContentObject {
private final NodeAttributes node;
@Nullable
private final String assignmentExplanation;
@Nullable
private final DatafeedTimingStats timingStats;
public static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
public static final ParseField NODE = new ParseField("node");
public static final ParseField TIMING_STATS = new ParseField("timing_stats");
public static final ConstructingObjectParser<DatafeedStats, Void> PARSER = new ConstructingObjectParser<>("datafeed_stats",
true,
@ -52,7 +55,8 @@ public class DatafeedStats implements ToXContentObject {
DatafeedState datafeedState = DatafeedState.fromString((String)a[1]);
NodeAttributes nodeAttributes = (NodeAttributes)a[2];
String assignmentExplanation = (String)a[3];
return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentExplanation);
DatafeedTimingStats timingStats = (DatafeedTimingStats)a[4];
return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentExplanation, timingStats);
} );
static {
@ -60,14 +64,16 @@ public class DatafeedStats implements ToXContentObject {
PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedState.STATE);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE);
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), DatafeedTimingStats.PARSER, TIMING_STATS);
}
public DatafeedStats(String datafeedId, DatafeedState datafeedState, @Nullable NodeAttributes node,
@Nullable String assignmentExplanation) {
@Nullable String assignmentExplanation, @Nullable DatafeedTimingStats timingStats) {
this.datafeedId = Objects.requireNonNull(datafeedId);
this.datafeedState = Objects.requireNonNull(datafeedState);
this.node = node;
this.assignmentExplanation = assignmentExplanation;
this.timingStats = timingStats;
}
public String getDatafeedId() {
@ -86,6 +92,10 @@ public class DatafeedStats implements ToXContentObject {
return assignmentExplanation;
}
public DatafeedTimingStats getDatafeedTimingStats() {
return timingStats;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
@ -110,13 +120,16 @@ public class DatafeedStats implements ToXContentObject {
if (assignmentExplanation != null) {
builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation);
}
if (timingStats != null) {
builder.field(TIMING_STATS.getPreferredName(), timingStats);
}
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(datafeedId, datafeedState.toString(), node, assignmentExplanation);
return Objects.hash(datafeedId, datafeedState.toString(), node, assignmentExplanation, timingStats);
}
@Override
@ -131,6 +144,7 @@ public class DatafeedStats implements ToXContentObject {
return Objects.equals(datafeedId, other.datafeedId) &&
Objects.equals(this.datafeedState, other.datafeedState) &&
Objects.equals(this.node, other.node) &&
Objects.equals(this.assignmentExplanation, other.assignmentExplanation);
Objects.equals(this.assignmentExplanation, other.assignmentExplanation) &&
Objects.equals(this.timingStats, other.timingStats);
}
}


@ -0,0 +1,152 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.datafeed;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DatafeedTimingStats implements ToXContentObject {
public static final ParseField JOB_ID = new ParseField("job_id");
public static final ParseField SEARCH_COUNT = new ParseField("search_count");
public static final ParseField BUCKET_COUNT = new ParseField("bucket_count");
public static final ParseField TOTAL_SEARCH_TIME_MS = new ParseField("total_search_time_ms");
public static final ParseField AVG_SEARCH_TIME_PER_BUCKET_MS = new ParseField("average_search_time_per_bucket_ms");
public static final ParseField TYPE = new ParseField("datafeed_timing_stats");
public static final ConstructingObjectParser<DatafeedTimingStats, Void> PARSER = createParser();
private static ConstructingObjectParser<DatafeedTimingStats, Void> createParser() {
ConstructingObjectParser<DatafeedTimingStats, Void> parser =
new ConstructingObjectParser<>(
"datafeed_timing_stats",
true,
args -> {
String jobId = (String) args[0];
Long searchCount = (Long) args[1];
Long bucketCount = (Long) args[2];
Double totalSearchTimeMs = (Double) args[3];
Double avgSearchTimePerBucketMs = (Double) args[4];
return new DatafeedTimingStats(
jobId,
getOrDefault(searchCount, 0L),
getOrDefault(bucketCount, 0L),
getOrDefault(totalSearchTimeMs, 0.0),
avgSearchTimePerBucketMs);
});
parser.declareString(constructorArg(), JOB_ID);
parser.declareLong(optionalConstructorArg(), SEARCH_COUNT);
parser.declareLong(optionalConstructorArg(), BUCKET_COUNT);
parser.declareDouble(optionalConstructorArg(), TOTAL_SEARCH_TIME_MS);
parser.declareDouble(optionalConstructorArg(), AVG_SEARCH_TIME_PER_BUCKET_MS);
return parser;
}
private final String jobId;
private long searchCount;
private long bucketCount;
private double totalSearchTimeMs;
private Double avgSearchTimePerBucketMs;
public DatafeedTimingStats(
String jobId, long searchCount, long bucketCount, double totalSearchTimeMs, @Nullable Double avgSearchTimePerBucketMs) {
this.jobId = Objects.requireNonNull(jobId);
this.searchCount = searchCount;
this.bucketCount = bucketCount;
this.totalSearchTimeMs = totalSearchTimeMs;
this.avgSearchTimePerBucketMs = avgSearchTimePerBucketMs;
}
public String getJobId() {
return jobId;
}
public long getSearchCount() {
return searchCount;
}
public long getBucketCount() {
return bucketCount;
}
public double getTotalSearchTimeMs() {
return totalSearchTimeMs;
}
public Double getAvgSearchTimePerBucketMs() {
return avgSearchTimePerBucketMs;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field(JOB_ID.getPreferredName(), jobId);
builder.field(SEARCH_COUNT.getPreferredName(), searchCount);
builder.field(BUCKET_COUNT.getPreferredName(), bucketCount);
builder.field(TOTAL_SEARCH_TIME_MS.getPreferredName(), totalSearchTimeMs);
if (avgSearchTimePerBucketMs != null) {
builder.field(AVG_SEARCH_TIME_PER_BUCKET_MS.getPreferredName(), avgSearchTimePerBucketMs);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
DatafeedTimingStats other = (DatafeedTimingStats) obj;
return Objects.equals(this.jobId, other.jobId)
&& this.searchCount == other.searchCount
&& this.bucketCount == other.bucketCount
&& this.totalSearchTimeMs == other.totalSearchTimeMs
&& Objects.equals(this.avgSearchTimePerBucketMs, other.avgSearchTimePerBucketMs);
}
@Override
public int hashCode() {
return Objects.hash(jobId, searchCount, bucketCount, totalSearchTimeMs, avgSearchTimePerBucketMs);
}
@Override
public String toString() {
return Strings.toString(this);
}
private static <T> T getOrDefault(@Nullable T value, T defaultValue) {
return value != null ? value : defaultValue;
}
}
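
A parsing sketch for the new class (these stats also surface through DatafeedStats#getDatafeedTimingStats above). The JSON payload is hypothetical; only job_id is required, absent counters default to 0 through getOrDefault(...), and the average stays null.

import org.elasticsearch.client.ml.datafeed.DatafeedTimingStats;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;

public class TimingStatsParseExample {
    public static DatafeedTimingStats parse() throws IOException {
        // Hypothetical payload; field names match the ParseFields declared above.
        String json = "{\"job_id\":\"job-1\",\"search_count\":100,\"total_search_time_ms\":2500.0}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            return DatafeedTimingStats.PARSER.apply(parser, null);
        }
    }
}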


@ -287,6 +287,7 @@ public class DatafeedUpdate implements ToXContentObject {
this.delayedDataCheckConfig = config.delayedDataCheckConfig;
}
@Deprecated
public Builder setJobId(String jobId) {
this.jobId = jobId;
return this;


@ -18,6 +18,9 @@
*/
package org.elasticsearch.client.ml.dataframe.evaluation;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -38,12 +41,17 @@ public class MlEvaluationNamedXContentProvider implements NamedXContentProvider
// Evaluations
new NamedXContentRegistry.Entry(
Evaluation.class, new ParseField(BinarySoftClassification.NAME), BinarySoftClassification::fromXContent),
new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(Regression.NAME), Regression::fromXContent),
// Evaluation metrics
new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(AucRocMetric.NAME), AucRocMetric::fromXContent),
new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(PrecisionMetric.NAME), PrecisionMetric::fromXContent),
new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(RecallMetric.NAME), RecallMetric::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.class, new ParseField(ConfusionMatrixMetric.NAME), ConfusionMatrixMetric::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.class, new ParseField(MeanSquaredErrorMetric.NAME), MeanSquaredErrorMetric::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.class, new ParseField(RSquaredMetric.NAME), RSquaredMetric::fromXContent),
// Evaluation metrics results
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(AucRocMetric.NAME), AucRocMetric.Result::fromXContent),
@ -51,6 +59,10 @@ public class MlEvaluationNamedXContentProvider implements NamedXContentProvider
EvaluationMetric.Result.class, new ParseField(PrecisionMetric.NAME), PrecisionMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(RecallMetric.NAME), RecallMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(RSquaredMetric.NAME), RSquaredMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(MeanSquaredErrorMetric.NAME), MeanSquaredErrorMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(ConfusionMatrixMetric.NAME), ConfusionMatrixMetric.Result::fromXContent));
}
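
A sketch of wiring these entries into a registry so the new regression objects can be parsed; it assumes the entries above are returned from getNamedXContentParsers(), the NamedXContentProvider method this class implements.

import org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;

public class EvaluationRegistryExample {
    public static NamedXContentRegistry registry() {
        // getNamedXContentParsers() is assumed; the entries themselves are shown above.
        return new NamedXContentRegistry(new MlEvaluationNamedXContentProvider().getNamedXContentParsers());
    }
}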


@ -0,0 +1,129 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.regression;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
* Calculates the mean squared error between two known numerical fields.
*
* equation: mse = 1/n * Σ(y - y´)^2
*/
public class MeanSquaredErrorMetric implements EvaluationMetric {
public static final String NAME = "mean_squared_error";
private static final ObjectParser<MeanSquaredErrorMetric, Void> PARSER =
new ObjectParser<>("mean_squared_error", true, MeanSquaredErrorMetric::new);
public static MeanSquaredErrorMetric fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
public MeanSquaredErrorMetric() {
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
return true;
}
@Override
public int hashCode() {
// create static hash code from name as there are currently no unique fields per class instance
return Objects.hashCode(NAME);
}
@Override
public String getName() {
return NAME;
}
public static class Result implements EvaluationMetric.Result {
public static final ParseField ERROR = new ParseField("error");
private final double error;
public static Result fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
private static final ConstructingObjectParser<Result, Void> PARSER =
new ConstructingObjectParser<>("mean_squared_error_result", true, args -> new Result((double) args[0]));
static {
PARSER.declareDouble(constructorArg(), ERROR);
}
public Result(double error) {
this.error = error;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field(ERROR.getPreferredName(), error);
builder.endObject();
return builder;
}
public double getError() {
return error;
}
@Override
public String getMetricName() {
return NAME;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Result that = (Result) o;
return Objects.equals(that.error, this.error);
}
@Override
public int hashCode() {
return Objects.hash(error);
}
}
}


@ -0,0 +1,131 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.regression;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
* Calculates R-Squared between two known numerical fields.
*
* equation: r² = 1 - SSres/SStot
* such that,
* SSres = Σ(y - y´)^2
* SStot = Σ(y - y_mean)^2
*/
public class RSquaredMetric implements EvaluationMetric {

    public static final String NAME = "r_squared";

    private static final ObjectParser<RSquaredMetric, Void> PARSER =
        new ObjectParser<>("r_squared", true, RSquaredMetric::new);

    public static RSquaredMetric fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    public RSquaredMetric() {
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        return true;
    }

    @Override
    public int hashCode() {
        // create static hash code from name as there are currently no unique fields per class instance
        return Objects.hashCode(NAME);
    }

    @Override
    public String getName() {
        return NAME;
    }

    public static class Result implements EvaluationMetric.Result {

        public static final ParseField VALUE = new ParseField("value");

        private final double value;

        public static Result fromXContent(XContentParser parser) {
            return PARSER.apply(parser, null);
        }

        private static final ConstructingObjectParser<Result, Void> PARSER =
            new ConstructingObjectParser<>("r_squared_result", true, args -> new Result((double) args[0]));

        static {
            PARSER.declareDouble(constructorArg(), VALUE);
        }

        public Result(double value) {
            this.value = value;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(VALUE.getPreferredName(), value);
            builder.endObject();
            return builder;
        }

        public double getValue() {
            return value;
        }

        @Override
        public String getMetricName() {
            return NAME;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Result that = (Result) o;
            return Objects.equals(that.value, this.value);
        }

        @Override
        public int hashCode() {
            return Objects.hash(value);
        }
    }
}

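For intuition, here is a self-contained sketch of the R² computation described in the Javadoc above. The data values are illustrative, and the real calculation happens server-side, not in this client class:

public final class RSquaredExample {
    public static void main(String[] args) {
        double[] actual = {3.0, -0.5, 2.0, 7.0};    // y
        double[] predicted = {2.5, 0.0, 2.0, 8.0};  // ŷ

        // ȳ: mean of the actual values
        double mean = 0.0;
        for (double y : actual) {
            mean += y;
        }
        mean /= actual.length;

        double ssRes = 0.0; // Σ(y - ŷ)^2
        double ssTot = 0.0; // Σ(y - ȳ)^2
        for (int i = 0; i < actual.length; i++) {
            ssRes += Math.pow(actual[i] - predicted[i], 2);
            ssTot += Math.pow(actual[i] - mean, 2);
        }

        System.out.println(1 - ssRes / ssTot); // ≈ 0.9486
    }
}
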
View File

@@ -0,0 +1,133 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.regression;
import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
/**
* Evaluation of regression results.
*/
public class Regression implements Evaluation {

    public static final String NAME = "regression";

    private static final ParseField ACTUAL_FIELD = new ParseField("actual_field");
    private static final ParseField PREDICTED_FIELD = new ParseField("predicted_field");
    private static final ParseField METRICS = new ParseField("metrics");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<Regression, Void> PARSER = new ConstructingObjectParser<>(
        NAME, true, a -> new Regression((String) a[0], (String) a[1], (List<EvaluationMetric>) a[2]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), ACTUAL_FIELD);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), PREDICTED_FIELD);
        PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(),
            (p, c, n) -> p.namedObject(EvaluationMetric.class, n, c), METRICS);
    }

    public static Regression fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /**
     * The field containing the actual value
     * The value of this field is assumed to be numeric
     */
    private final String actualField;

    /**
     * The field containing the predicted value
     * The value of this field is assumed to be numeric
     */
    private final String predictedField;

    /**
     * The list of metrics to calculate
     */
    private final List<EvaluationMetric> metrics;

    public Regression(String actualField, String predictedField) {
        this(actualField, predictedField, (List<EvaluationMetric>) null);
    }

    public Regression(String actualField, String predictedField, EvaluationMetric... metrics) {
        this(actualField, predictedField, Arrays.asList(metrics));
    }

    public Regression(String actualField, String predictedField, @Nullable List<EvaluationMetric> metrics) {
        this.actualField = Objects.requireNonNull(actualField);
        this.predictedField = Objects.requireNonNull(predictedField);
        if (metrics != null) {
            metrics.sort(Comparator.comparing(EvaluationMetric::getName));
        }
        this.metrics = metrics;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(ACTUAL_FIELD.getPreferredName(), actualField);
        builder.field(PREDICTED_FIELD.getPreferredName(), predictedField);
        if (metrics != null) {
            builder.startObject(METRICS.getPreferredName());
            for (EvaluationMetric metric : metrics) {
                builder.field(metric.getName(), metric);
            }
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Regression that = (Regression) o;
        return Objects.equals(that.actualField, this.actualField)
            && Objects.equals(that.predictedField, this.predictedField)
            && Objects.equals(that.metrics, this.metrics);
    }

    @Override
    public int hashCode() {
        return Objects.hash(actualField, predictedField, metrics);
    }
}

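A minimal usage sketch for the class above, assuming the RSquaredMetric from the previous file; the field names are illustrative:

// Metrics are optional; when present they are sorted by name and serialized
// under the "metrics" object.
Regression evaluation = new Regression("price", "predicted_price", new RSquaredMetric());
// toXContent then produces something like:
// {"actual_field":"price","predicted_field":"predicted_price","metrics":{"r_squared":{}}}
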
View File

@@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
@@ -52,6 +53,7 @@ public class BinarySoftClassification implements Evaluation {
     public static final ConstructingObjectParser<BinarySoftClassification, Void> PARSER =
         new ConstructingObjectParser<>(
             NAME,
+            true,
             args -> new BinarySoftClassification((String) args[0], (String) args[1], (List<EvaluationMetric>) args[2]));
     static {
@@ -80,6 +82,10 @@ public class BinarySoftClassification implements Evaluation {
      */
     private final List<EvaluationMetric> metrics;
+
+    public BinarySoftClassification(String actualField, String predictedField) {
+        this(actualField, predictedField, (List<EvaluationMetric>) null);
+    }
     public BinarySoftClassification(String actualField, String predictedProbabilityField, EvaluationMetric... metric) {
         this(actualField, predictedProbabilityField, Arrays.asList(metric));
     }
@@ -88,7 +94,10 @@ public class BinarySoftClassification implements Evaluation {
                                     @Nullable List<EvaluationMetric> metrics) {
         this.actualField = Objects.requireNonNull(actualField);
         this.predictedProbabilityField = Objects.requireNonNull(predictedProbabilityField);
-        this.metrics = Objects.requireNonNull(metrics);
+        if (metrics != null) {
+            metrics.sort(Comparator.comparing(EvaluationMetric::getName));
+        }
+        this.metrics = metrics;
     }
     @Override
@@ -102,11 +111,13 @@ public class BinarySoftClassification implements Evaluation {
         builder.field(ACTUAL_FIELD.getPreferredName(), actualField);
         builder.field(PREDICTED_PROBABILITY_FIELD.getPreferredName(), predictedProbabilityField);
-        builder.startObject(METRICS.getPreferredName());
-        for (EvaluationMetric metric : metrics) {
-            builder.field(metric.getName(), metric);
+        if (metrics != null) {
+            builder.startObject(METRICS.getPreferredName());
+            for (EvaluationMetric metric : metrics) {
+                builder.field(metric.getName(), metric);
+            }
+            builder.endObject();
         }
-        builder.endObject();
         builder.endObject();
         return builder;

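With these changes the metrics argument is optional end to end. A sketch with illustrative field names, relying only on the constructors and toXContent shown above:

// No metrics supplied: the constructor accepts null and toXContent now
// omits the "metrics" object entirely, rather than throwing an NPE.
BinarySoftClassification eval = new BinarySoftClassification("label", "probability");
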
View File

@@ -39,6 +39,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 public class TimingStats implements ToXContentObject {
     public static final ParseField BUCKET_COUNT = new ParseField("bucket_count");
+    public static final ParseField TOTAL_BUCKET_PROCESSING_TIME_MS = new ParseField("total_bucket_processing_time_ms");
     public static final ParseField MIN_BUCKET_PROCESSING_TIME_MS = new ParseField("minimum_bucket_processing_time_ms");
     public static final ParseField MAX_BUCKET_PROCESSING_TIME_MS = new ParseField("maximum_bucket_processing_time_ms");
     public static final ParseField AVG_BUCKET_PROCESSING_TIME_MS = new ParseField("average_bucket_processing_time_ms");
@@ -49,12 +50,28 @@ public class TimingStats implements ToXContentObject {
         new ConstructingObjectParser<>(
             "timing_stats",
             true,
-            args ->
-                new TimingStats((String) args[0], (long) args[1], (Double) args[2], (Double) args[3], (Double) args[4], (Double) args[5]));
+            args -> {
+                String jobId = (String) args[0];
+                Long bucketCount = (Long) args[1];
+                Double totalBucketProcessingTimeMs = (Double) args[2];
+                Double minBucketProcessingTimeMs = (Double) args[3];
+                Double maxBucketProcessingTimeMs = (Double) args[4];
+                Double avgBucketProcessingTimeMs = (Double) args[5];
+                Double exponentialAvgBucketProcessingTimeMs = (Double) args[6];
+                return new TimingStats(
+                    jobId,
+                    getOrDefault(bucketCount, 0L),
+                    getOrDefault(totalBucketProcessingTimeMs, 0.0),
+                    minBucketProcessingTimeMs,
+                    maxBucketProcessingTimeMs,
+                    avgBucketProcessingTimeMs,
+                    exponentialAvgBucketProcessingTimeMs);
+            });
     static {
         PARSER.declareString(constructorArg(), Job.ID);
-        PARSER.declareLong(constructorArg(), BUCKET_COUNT);
+        PARSER.declareLong(optionalConstructorArg(), BUCKET_COUNT);
+        PARSER.declareDouble(optionalConstructorArg(), TOTAL_BUCKET_PROCESSING_TIME_MS);
         PARSER.declareDouble(optionalConstructorArg(), MIN_BUCKET_PROCESSING_TIME_MS);
         PARSER.declareDouble(optionalConstructorArg(), MAX_BUCKET_PROCESSING_TIME_MS);
         PARSER.declareDouble(optionalConstructorArg(), AVG_BUCKET_PROCESSING_TIME_MS);
@@ -63,6 +80,7 @@ public class TimingStats implements ToXContentObject {
     private final String jobId;
     private long bucketCount;
+    private double totalBucketProcessingTimeMs;
     private Double minBucketProcessingTimeMs;
     private Double maxBucketProcessingTimeMs;
     private Double avgBucketProcessingTimeMs;
@@ -71,12 +89,14 @@ public class TimingStats implements ToXContentObject {
     public TimingStats(
             String jobId,
             long bucketCount,
+            double totalBucketProcessingTimeMs,
             @Nullable Double minBucketProcessingTimeMs,
             @Nullable Double maxBucketProcessingTimeMs,
             @Nullable Double avgBucketProcessingTimeMs,
             @Nullable Double exponentialAvgBucketProcessingTimeMs) {
         this.jobId = jobId;
         this.bucketCount = bucketCount;
+        this.totalBucketProcessingTimeMs = totalBucketProcessingTimeMs;
         this.minBucketProcessingTimeMs = minBucketProcessingTimeMs;
         this.maxBucketProcessingTimeMs = maxBucketProcessingTimeMs;
         this.avgBucketProcessingTimeMs = avgBucketProcessingTimeMs;
@@ -91,6 +111,10 @@ public class TimingStats implements ToXContentObject {
         return bucketCount;
     }
+
+    public double getTotalBucketProcessingTimeMs() {
+        return totalBucketProcessingTimeMs;
+    }
     public Double getMinBucketProcessingTimeMs() {
         return minBucketProcessingTimeMs;
     }
@@ -112,6 +136,7 @@ public class TimingStats implements ToXContentObject {
         builder.startObject();
         builder.field(Job.ID.getPreferredName(), jobId);
         builder.field(BUCKET_COUNT.getPreferredName(), bucketCount);
+        builder.field(TOTAL_BUCKET_PROCESSING_TIME_MS.getPreferredName(), totalBucketProcessingTimeMs);
         if (minBucketProcessingTimeMs != null) {
             builder.field(MIN_BUCKET_PROCESSING_TIME_MS.getPreferredName(), minBucketProcessingTimeMs);
         }
@@ -135,6 +160,7 @@ public class TimingStats implements ToXContentObject {
         TimingStats that = (TimingStats) o;
         return Objects.equals(this.jobId, that.jobId)
             && this.bucketCount == that.bucketCount
+            && this.totalBucketProcessingTimeMs == that.totalBucketProcessingTimeMs
             && Objects.equals(this.minBucketProcessingTimeMs, that.minBucketProcessingTimeMs)
             && Objects.equals(this.maxBucketProcessingTimeMs, that.maxBucketProcessingTimeMs)
             && Objects.equals(this.avgBucketProcessingTimeMs, that.avgBucketProcessingTimeMs)
@@ -146,6 +172,7 @@ public class TimingStats implements ToXContentObject {
         return Objects.hash(
             jobId,
             bucketCount,
+            totalBucketProcessingTimeMs,
             minBucketProcessingTimeMs,
             maxBucketProcessingTimeMs,
             avgBucketProcessingTimeMs,
@@ -156,4 +183,8 @@ public class TimingStats implements ToXContentObject {
     public String toString() {
         return Strings.toString(this);
     }
+
+    private static <T> T getOrDefault(@Nullable T value, T defaultValue) {
+        return value != null ? value : defaultValue;
+    }
 }

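A sketch of the new defaulting and null handling, using an illustrative job id. In the parser, bucket_count and total_bucket_processing_time_ms fall back to zero when absent, while the nullable stats are simply skipped during serialization:

TimingStats stats = new TimingStats("my-job", 0L, 0.0, null, null, null, null);
// toString() renders the object as JSON via Strings.toString(this),
// producing something like:
// {"job_id":"my-job","bucket_count":0,"total_bucket_processing_time_ms":0.0}
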
View File

@@ -0,0 +1,49 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.snapshotlifecycle;
import org.elasticsearch.client.TimedRequest;
import java.util.Objects;
public class DeleteSnapshotLifecyclePolicyRequest extends TimedRequest {

    private final String policyId;

    public DeleteSnapshotLifecyclePolicyRequest(String policyId) {
        this.policyId = policyId;
    }

    public String getPolicyId() {
        return this.policyId;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        DeleteSnapshotLifecyclePolicyRequest other = (DeleteSnapshotLifecyclePolicyRequest) o;
        return this.policyId.equals(other.policyId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(this.policyId);
    }
}

View File

@@ -0,0 +1,49 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.snapshotlifecycle;
import org.elasticsearch.client.TimedRequest;
import java.util.Objects;
public class ExecuteSnapshotLifecyclePolicyRequest extends TimedRequest {

    private final String policyId;

    public ExecuteSnapshotLifecyclePolicyRequest(String policyId) {
        this.policyId = policyId;
    }

    public String getPolicyId() {
        return this.policyId;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ExecuteSnapshotLifecyclePolicyRequest other = (ExecuteSnapshotLifecyclePolicyRequest) o;
        return this.policyId.equals(other.policyId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(this.policyId);
    }
}

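Typical construction of the two requests above, with an illustrative policy id; both classes extend TimedRequest and so share its timeout configuration:

DeleteSnapshotLifecyclePolicyRequest deleteRequest =
    new DeleteSnapshotLifecyclePolicyRequest("nightly-snapshots");
ExecuteSnapshotLifecyclePolicyRequest executeRequest =
    new ExecuteSnapshotLifecyclePolicyRequest("nightly-snapshots");
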
Some files were not shown because too many files have changed in this diff.