Merge branch 'master' into zen2
commit a2cd8f731e
@@ -95,7 +95,7 @@ Contributing to the Elasticsearch codebase
 JDK 10 is required to build Elasticsearch. You must have a JDK 10 installation
 with the environment variable `JAVA_HOME` referencing the path to Java home for
 your JDK 10 installation. By default, tests use the same runtime as `JAVA_HOME`.
-However, since Elasticsearch, supports JDK 8 the build supports compiling with
+However, since Elasticsearch supports JDK 8, the build supports compiling with
 JDK 10 and testing on a JDK 8 runtime; to do this, set `RUNTIME_JAVA_HOME`
 pointing to the Java home of a JDK 8 installation. Note that this mechanism can
 be used to test against other JDKs as well, this is not only limited to JDK 8.
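To make the mechanism concrete, a build that compiles with JDK 10 but runs tests on a JDK 8 runtime could be invoked like this (the paths are illustrative, not prescribed by the docs):

    JAVA_HOME=/usr/lib/jvm/jdk-10 RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-8 ./gradlew check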
@@ -325,21 +325,19 @@ common configurations in our build and how we use them:

 <dl>
 <dt>`compile`</dt><dd>Code that is on the classpath at both compile and
-runtime. If the [`shadow`][shadow-plugin] plugin is applied to the project then
-this code is bundled into the jar produced by the project.</dd>
+runtime.</dd>
 <dt>`runtime`</dt><dd>Code that is not on the classpath at compile time but is
 on the classpath at runtime. We mostly use this configuration to make sure that
 we do not accidentally compile against dependencies of our dependencies also
 known as "transitive" dependencies".</dd>
-<dt>`compileOnly`</dt><dd>Code that is on the classpath at comile time but that
+<dt>`compileOnly`</dt><dd>Code that is on the classpath at compile time but that
 should not be shipped with the project because it is "provided" by the runtime
 somehow. Elasticsearch plugins use this configuration to include dependencies
 that are bundled with Elasticsearch's server.</dd>
-<dt>`shadow`</dt><dd>Only available in projects with the shadow plugin. Code
-that is on the classpath at both compile and runtime but it *not* bundled into
-the jar produced by the project. If you depend on a project with the `shadow`
-plugin then you need to depend on this configuration because it will bring
-along all of the dependencies you need at runtime.</dd>
+<dt>`bundle`</dt><dd>Only available in projects with the shadow plugin,
+dependencies with this configuration are bundled into the jar produced by the
+build. Since IDEs do not understand this configuration we rig them to treat
+dependencies in this configuration as `compile` dependencies.</dd>
 <dt>`testCompile`</dt><dd>Code that is on the classpath for compiling tests
 that are part of this project but not production code. The canonical example
 of this is `junit`.</dd>
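As a rough sketch of how these configurations read in a build script (the coordinates below are made up for illustration):

    dependencies {
        compile 'com.example:lib-a:1.0'     // compile and runtime classpath
        runtime 'com.example:lib-b:1.0'     // runtime classpath only
        compileOnly 'com.example:lib-c:1.0' // provided at runtime, not shipped
        bundle 'com.example:lib-d:1.0'      // bundled into the jar by the shadow plugin
        testCompile 'junit:junit:4.12'      // tests only
    }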
@@ -21,10 +21,7 @@ apply plugin: 'elasticsearch.build'
 apply plugin: 'application'
 mainClassName = 'org.openjdk.jmh.Main'

-// Not published so no need to assemble
-tasks.remove(assemble)
-build.dependsOn.remove('assemble')
-
+assemble.enabled = false
 archivesBaseName = 'elasticsearch-benchmarks'

 test.enabled = false
build.gradle (118 changed lines)
@@ -16,21 +16,15 @@
 * specific language governing permissions and limitations
 * under the License.
 */

 import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.elasticsearch.gradle.BuildPlugin
 import org.elasticsearch.gradle.LoggedExec
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionCollection
 import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.plugin.PluginBuildPlugin
 import org.gradle.plugins.ide.eclipse.model.SourceFolder
 import org.gradle.util.GradleVersion
 import org.gradle.util.DistributionLocator
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.apache.tools.ant.filters.ReplaceTokens

 import java.nio.file.Files
 import java.nio.file.Path
 import java.security.MessageDigest

 plugins {
   id 'com.gradle.build-scan' version '1.13.2'
@@ -304,7 +298,7 @@ subprojects {
 // org.elasticsearch:elasticsearch must be the last one or all the links for the
 // other packages (e.g org.elasticsearch.client) will point to server rather than
 // their own artifacts.
-if (project.plugins.hasPlugin(BuildPlugin)) {
+if (project.plugins.hasPlugin(BuildPlugin) || project.plugins.hasPlugin(PluginBuildPlugin)) {
 String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
 Closure sortClosure = { a, b -> b.group <=> a.group }
 Closure depJavadocClosure = { shadowed, dep ->
@@ -322,13 +316,6 @@ subprojects {
 */
 project.evaluationDependsOn(upstreamProject.path)
 project.javadoc.source += upstreamProject.javadoc.source
-/*
- * Do not add those projects to the javadoc classpath because
- * we are going to resolve them with their source instead.
- */
-project.javadoc.classpath = project.javadoc.classpath.filter { f ->
-  false == upstreamProject.configurations.archives.artifacts.files.files.contains(f)
-}
 /*
 * Instead we need the upstream project's javadoc classpath so
 * we don't barf on the classes that it references.
@@ -345,16 +332,16 @@ subprojects {
 project.configurations.compile.dependencies
   .findAll()
   .toSorted(sortClosure)
-  .each({ c -> depJavadocClosure(hasShadow, c) })
+  .each({ c -> depJavadocClosure(false, c) })
 project.configurations.compileOnly.dependencies
   .findAll()
   .toSorted(sortClosure)
-  .each({ c -> depJavadocClosure(hasShadow, c) })
+  .each({ c -> depJavadocClosure(false, c) })
 if (hasShadow) {
-  project.configurations.shadow.dependencies
+  project.configurations.bundle.dependencies
     .findAll()
     .toSorted(sortClosure)
-    .each({ c -> depJavadocClosure(false, c) })
+    .each({ c -> depJavadocClosure(true, c) })
 }
 }
 }
@@ -518,28 +505,31 @@ allprojects {
 tasks.cleanEclipse.dependsOn(wipeEclipseSettings)
 // otherwise the eclipse merging is *super confusing*
 tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings)

+// work arround https://github.com/gradle/gradle/issues/6582
+tasks.eclipseProject.mustRunAfter tasks.cleanEclipseProject
+tasks.matching { it.name == 'eclipseClasspath' }.all {
+  it.mustRunAfter { tasks.cleanEclipseClasspath }
+}
+tasks.matching { it.name == 'eclipseJdt' }.all {
+  it.mustRunAfter { tasks.cleanEclipseJdt }
+}
+tasks.copyEclipseSettings.mustRunAfter tasks.wipeEclipseSettings
 }

 allprojects {
 /*
 * IntelliJ and Eclipse don't know about the shadow plugin so when we're
-* in "IntelliJ mode" or "Eclipse mode" add "runtime" dependencies
-* eveywhere where we see a "shadow" dependency which will cause them to
-* reference shadowed projects directly rather than rely on the shadowing
-* to include them. This is the correct thing for it to do because it
-* doesn't run the jar shadowing at all. This isn't needed for the project
+* in "IntelliJ mode" or "Eclipse mode" switch "bundle" dependencies into
+* regular "compile" dependencies. This isn't needed for the project
 * itself because the IDE configuration is done by SourceSets but it is
 * *is* needed for projects that depends on the project doing the shadowing.
 * Without this they won't properly depend on the shadowed project.
 */
 if (isEclipse || isIdea) {
-  configurations.all { Configuration configuration ->
-    dependencies.all { Dependency dep ->
-      if (dep instanceof ProjectDependency) {
-        if (dep.getTargetConfiguration() == 'shadow') {
-          configuration.dependencies.add(project.dependencies.project(path: dep.dependencyProject.path, configuration: 'runtime'))
-        }
-      }
+  project.plugins.withType(ShadowPlugin).whenPluginAdded {
+    project.afterEvaluate {
+      project.configurations.compile.extendsFrom project.configurations.bundle
+    }
+  }
 }
@@ -582,62 +572,6 @@ wrapper {
 }
 }

-static void assertLinesInFile(final Path path, final List<String> expectedLines) {
-  final List<String> actualLines = Files.readAllLines(path)
-  int line = 0
-  for (final String expectedLine : expectedLines) {
-    final String actualLine = actualLines.get(line)
-    if (expectedLine != actualLine) {
-      throw new GradleException("expected line [${line + 1}] in [${path}] to be [${expectedLine}] but was [${actualLine}]")
-    }
-    line++
-  }
-}
-
-/*
- * Check that all generated JARs have our NOTICE.txt and an appropriate
- * LICENSE.txt in them. We configurate this in gradle but we'd like to
- * be extra paranoid.
- */
-subprojects { project ->
-  project.tasks.withType(Jar).whenTaskAdded { jarTask ->
-    final Task extract = project.task("extract${jarTask.name.capitalize()}", type: LoggedExec) {
-      dependsOn jarTask
-      ext.destination = project.buildDir.toPath().resolve("jar-extracted/${jarTask.name}")
-      commandLine "${->new File(rootProject.compilerJavaHome, 'bin/jar')}",
-        'xf', "${-> jarTask.outputs.files.singleFile}", 'META-INF/LICENSE.txt', 'META-INF/NOTICE.txt'
-      workingDir destination
-      onlyIf {jarTask.enabled}
-      doFirst {
-        project.delete(destination)
-        Files.createDirectories(destination)
-      }
-    }
-
-    final Task checkNotice = project.task("verify${jarTask.name.capitalize()}Notice") {
-      dependsOn extract
-      onlyIf {jarTask.enabled}
-      doLast {
-        final List<String> noticeLines = Files.readAllLines(project.noticeFile.toPath())
-        final Path noticePath = extract.destination.resolve('META-INF/NOTICE.txt')
-        assertLinesInFile(noticePath, noticeLines)
-      }
-    }
-    project.check.dependsOn checkNotice
-
-    final Task checkLicense = project.task("verify${jarTask.name.capitalize()}License") {
-      dependsOn extract
-      onlyIf {jarTask.enabled}
-      doLast {
-        final List<String> licenseLines = Files.readAllLines(project.licenseFile.toPath())
-        final Path licensePath = extract.destination.resolve('META-INF/LICENSE.txt')
-        assertLinesInFile(licensePath, licenseLines)
-      }
-    }
-    project.check.dependsOn checkLicense
-  }
-}
-
 /* Remove assemble/dependenciesInfo on all qa projects because we don't need to publish
 * artifacts for them. */
 gradle.projectsEvaluated {
@@ -645,13 +579,11 @@ gradle.projectsEvaluated {
 if (project.path.startsWith(':qa')) {
   Task assemble = project.tasks.findByName('assemble')
   if (assemble) {
-    project.tasks.remove(assemble)
-    project.build.dependsOn.remove('assemble')
+    assemble.enabled = false
   }
   Task dependenciesInfo = project.tasks.findByName('dependenciesInfo')
   if (dependenciesInfo) {
-    project.tasks.remove(dependenciesInfo)
-    project.precommit.dependsOn.remove('dependenciesInfo')
+    dependenciesInfo.enabled = false
   }
 }
 }
@@ -17,6 +17,7 @@
 * under the License.
 */
+import java.nio.file.Files
 import org.gradle.util.GradleVersion

 plugins {
 id 'java-gradle-plugin'
@@ -102,7 +103,6 @@ dependencies {
 compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
 compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r'
 compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
-compile 'de.thetaphi:forbiddenapis:2.5'
 compile 'org.apache.rat:apache-rat:0.11'
 compile "org.elasticsearch:jna:4.5.1"
 compile 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
@@ -167,7 +167,6 @@ if (project != rootProject) {
 it.tasks.matching { it.name == 'publishNebulaPublicationToLocalTestRepository'}
 }
-exclude "**/*Tests.class"
 include "**/*IT.class"
 testClassesDirs = sourceSets.test.output.classesDirs
 classpath = sourceSets.test.runtimeClasspath
 inputs.dir(file("src/testKit"))
@@ -38,7 +38,6 @@ import org.gradle.api.artifacts.ModuleDependency
 import org.gradle.api.artifacts.ModuleVersionIdentifier
 import org.gradle.api.artifacts.ProjectDependency
 import org.gradle.api.artifacts.ResolvedArtifact
-import org.gradle.api.artifacts.SelfResolvingDependency
 import org.gradle.api.artifacts.dsl.RepositoryHandler
 import org.gradle.api.execution.TaskExecutionGraph
 import org.gradle.api.plugins.JavaPlugin
@@ -57,6 +56,7 @@ import org.gradle.util.GradleVersion
 import java.nio.charset.StandardCharsets
 import java.time.ZoneOffset
 import java.time.ZonedDateTime

 /**
 * Encapsulates build configuration for elasticsearch projects.
 */
|
@ -79,8 +79,9 @@ class BuildPlugin implements Plugin<Project> {
|
|||
}
|
||||
project.pluginManager.apply('java')
|
||||
project.pluginManager.apply('carrotsearch.randomized-testing')
|
||||
// these plugins add lots of info to our jars
|
||||
configureConfigurations(project)
|
||||
configureJars(project) // jar config must be added before info broker
|
||||
// these plugins add lots of info to our jars
|
||||
project.pluginManager.apply('nebula.info-broker')
|
||||
project.pluginManager.apply('nebula.info-basic')
|
||||
project.pluginManager.apply('nebula.info-java')
|
||||
|
@@ -91,8 +92,8 @@ class BuildPlugin implements Plugin<Project> {

 globalBuildInfo(project)
 configureRepositories(project)
-configureConfigurations(project)
 project.ext.versions = VersionProperties.versions
+configureSourceSets(project)
 configureCompile(project)
 configureJavadoc(project)
 configureSourcesJar(project)
@@ -211,6 +212,7 @@ class BuildPlugin implements Plugin<Project> {
 project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion
 project.rootProject.ext.inFipsJvm = inFipsJvm
 project.rootProject.ext.gradleJavaVersion = JavaVersion.toVersion(gradleJavaVersion)
+project.rootProject.ext.java9Home = "${-> findJavaHome("9")}"
 }

 project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion
@@ -224,6 +226,7 @@ class BuildPlugin implements Plugin<Project> {
 project.ext.javaVersions = project.rootProject.ext.javaVersions
 project.ext.inFipsJvm = project.rootProject.ext.inFipsJvm
 project.ext.gradleJavaVersion = project.rootProject.ext.gradleJavaVersion
+project.ext.java9Home = project.rootProject.ext.java9Home
 }

 private static String getPaddedMajorVersion(JavaVersion compilerJavaVersionEnum) {
@@ -421,8 +424,10 @@ class BuildPlugin implements Plugin<Project> {
 project.configurations.compile.dependencies.all(disableTransitiveDeps)
 project.configurations.testCompile.dependencies.all(disableTransitiveDeps)
 project.configurations.compileOnly.dependencies.all(disableTransitiveDeps)

 project.plugins.withType(ShadowPlugin).whenPluginAdded {
-  project.configurations.shadow.dependencies.all(disableTransitiveDeps)
+  Configuration bundle = project.configurations.create('bundle')
+  bundle.dependencies.all(disableTransitiveDeps)
 }
 }
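With the `bundle` configuration created above, a project that applies the shadow plugin would declare a to-be-shaded dependency roughly like this (the coordinates are illustrative):

    dependencies {
        bundle 'com.example:shaded-dep:1.0' // ends up inside the jar produced by shadowJar
    }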
@@ -528,16 +533,21 @@ class BuildPlugin implements Plugin<Project> {
 project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
 // The GenerateMavenPom task is aggressive about setting the destination, instead of fighting it,
 // just make a copy.
+generatePOMTask.ext.pomFileName = null
 doLast {
   project.copy {
     from generatePOMTask.destination
     into "${project.buildDir}/distributions"
-    rename { "${project.archivesBaseName}-${project.version}.pom" }
+    rename {
+      generatePOMTask.ext.pomFileName == null ?
+        "${project.archivesBaseName}-${project.version}.pom" :
+        generatePOMTask.ext.pomFileName
+    }
   }
 }
 // build poms with assemble (if the assemble task exists)
 Task assemble = project.tasks.findByName('assemble')
-if (assemble) {
+if (assemble && assemble.enabled) {
   assemble.dependsOn(generatePOMTask)
 }
 }
@@ -555,32 +565,22 @@ class BuildPlugin implements Plugin<Project> {
 publications {
 nebula(MavenPublication) {
 artifacts = [ project.tasks.shadowJar ]
 artifactId = project.archivesBaseName
 /*
 * Configure the pom to include the "shadow" as compile dependencies
 * because that is how we're using them but remove all other dependencies
 * because they've been shaded into the jar.
 }
 }
 }
 }
 }
 }

 /**
 * Add dependencies that we are going to bundle to the compile classpath.
 */
 pom.withXml { XmlProvider xml ->
 Node root = xml.asNode()
 root.remove(root.dependencies)
 Node dependenciesNode = root.appendNode('dependencies')
 project.configurations.shadow.allDependencies.each {
 if (false == it instanceof SelfResolvingDependency) {
 Node dependencyNode = dependenciesNode.appendNode('dependency')
 dependencyNode.appendNode('groupId', it.group)
 dependencyNode.appendNode('artifactId', it.name)
 dependencyNode.appendNode('version', it.version)
 dependencyNode.appendNode('scope', 'compile')
 }
 }
 // Be tidy and remove the element if it is empty
 if (dependenciesNode.children.empty) {
 root.remove(dependenciesNode)
 }
 }
 }
 }
 static void configureSourceSets(Project project) {
 project.plugins.withType(ShadowPlugin).whenPluginAdded {
 ['main', 'test'].each {name ->
 SourceSet sourceSet = project.sourceSets.findByName(name)
 if (sourceSet != null) {
 sourceSet.compileClasspath += project.configurations.bundle
 }
 }
 }
@@ -603,7 +603,6 @@ class BuildPlugin implements Plugin<Project> {
 } else {
 options.fork = true
 options.forkOptions.javaHome = compilerJavaHomeFile
-options.forkOptions.memoryMaximumSize = "512m"
 }
 if (targetCompatibilityVersion == JavaVersion.VERSION_1_8) {
 // compile with compact 3 profile by default
@@ -741,6 +740,7 @@ class BuildPlugin implements Plugin<Project> {
 }
 from(project.noticeFile.parent) {
   include project.noticeFile.name
+  rename { 'NOTICE.txt' }
 }
 }
 }
@@ -763,9 +763,16 @@ class BuildPlugin implements Plugin<Project> {
 * better to be safe
 */
 mergeServiceFiles()
 /*
 * Bundle dependencies of the "bundled" configuration.
 */
 configurations = [project.configurations.bundle]
 }
 // Make sure we assemble the shadow jar
 project.tasks.assemble.dependsOn project.tasks.shadowJar
 project.artifacts {
 apiElements project.tasks.shadowJar
 }
 }
 }
@@ -798,8 +805,6 @@ class BuildPlugin implements Plugin<Project> {
 systemProperty 'tests.task', path
 systemProperty 'tests.security.manager', 'true'
 systemProperty 'jna.nosys', 'true'
-// TODO: remove this deprecation compatibility setting for 7.0
-systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false'
 systemProperty 'compiler.java', project.ext.compilerJavaVersion.getMajorVersion()
 if (project.ext.inFipsJvm) {
 systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() + "FIPS"
@@ -872,13 +877,8 @@ class BuildPlugin implements Plugin<Project> {
 exclude '**/*$*.class'

 project.plugins.withType(ShadowPlugin).whenPluginAdded {
-  /*
-   * If we make a shaded jar we test against it.
-   */
+  // Test against a shadow jar if we made one
   classpath -= project.tasks.compileJava.outputs.files
-  classpath -= project.configurations.compile
-  classpath -= project.configurations.runtime
-  classpath += project.configurations.shadow
   classpath += project.tasks.shadowJar.outputs.files
   dependsOn project.tasks.shadowJar
 }
@@ -904,26 +904,6 @@ class BuildPlugin implements Plugin<Project> {
 additionalTest.dependsOn(project.tasks.testClasses)
 project.check.dependsOn(additionalTest)
 });

-project.plugins.withType(ShadowPlugin).whenPluginAdded {
-  /*
-   * We need somewhere to configure dependencies that we don't wish
-   * to shade into the jar. The shadow plugin creates a "shadow"
-   * configuration which is *almost* exactly that. It is never
-   * bundled into the shaded jar but is used for main source
-   * compilation. Unfortunately, by default it is not used for
-   * *test* source compilation and isn't used in tests at all. This
-   * change makes it available for test compilation.
-   *
-   * Note that this isn't going to work properly with qa projects
-   * but they have no business applying the shadow plugin in the
-   * firstplace.
-   */
-  SourceSet testSourceSet = project.sourceSets.findByName('test')
-  if (testSourceSet != null) {
-    testSourceSet.compileClasspath += project.configurations.shadow
-  }
-}
 }

 private static configurePrecommit(Project project) {
@@ -935,7 +915,7 @@ class BuildPlugin implements Plugin<Project> {
 it.group.startsWith('org.elasticsearch') == false
 } - project.configurations.compileOnly
 project.plugins.withType(ShadowPlugin).whenPluginAdded {
-  project.dependencyLicenses.dependencies += project.configurations.shadow.fileCollection {
+  project.dependencyLicenses.dependencies += project.configurations.bundle.fileCollection {
   it.group.startsWith('org.elasticsearch') == false
 }
 }
@@ -946,7 +926,7 @@ class BuildPlugin implements Plugin<Project> {
 deps.runtimeConfiguration = project.configurations.runtime
 project.plugins.withType(ShadowPlugin).whenPluginAdded {
   deps.runtimeConfiguration = project.configurations.create('infoDeps')
-  deps.runtimeConfiguration.extendsFrom(project.configurations.runtime, project.configurations.shadow)
+  deps.runtimeConfiguration.extendsFrom(project.configurations.runtime, project.configurations.bundle)
 }
 deps.compileOnlyConfiguration = project.configurations.compileOnly
 project.afterEvaluate {
@@ -138,9 +138,8 @@ class VersionCollection {
 break
 }
 }
-// caveat 0 - now dip back 2 versions to get the last supported snapshot version of the line
-Version highestMinor = getHighestPreviousMinor(currentVersion.major - 1)
-maintenanceBugfixSnapshot = replaceAsSnapshot(highestMinor)
+// caveat 0 - the last supported snapshot of the line is on a version that we don't support (N-2)
+maintenanceBugfixSnapshot = null
 } else {
 // caveat 3 did not apply. version is not a X.0.0, so we are somewhere on a X.Y line
 // only check till minor == 0 of the major
@@ -293,7 +292,8 @@ class VersionCollection {
 * If you have a list [5.0.2, 5.1.2, 6.0.1, 6.1.1] and pass in 6 for the nextMajorVersion, it will return you 5.1.2
 */
 private Version getHighestPreviousMinor(Integer nextMajorVersion) {
-  return versionSet.headSet(Version.fromString("${nextMajorVersion}.0.0")).last()
+  SortedSet<Version> result = versionSet.headSet(Version.fromString("${nextMajorVersion}.0.0"))
+  return result.isEmpty() ? null : result.last()
 }

 /**
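The empty-set guard matters because `headSet` returns the strictly-lower subset and `last()` on an empty `SortedSet` throws; an illustrative sketch of the two cases:

    def versions = [Version.fromString('5.1.2'), Version.fromString('6.0.1')] as TreeSet
    versions.headSet(Version.fromString('6.0.0')).last()    // 5.1.2
    versions.headSet(Version.fromString('5.0.0')).isEmpty() // true; last() here would throw NoSuchElementException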
@@ -35,8 +35,7 @@ public class DocsTestPlugin extends RestTestPlugin {
 // The distribution can be configured with -Dtests.distribution on the command line
 project.integTestCluster.distribution = System.getProperty('tests.distribution', 'zip')
 // Docs are published separately so no need to assemble
-project.tasks.remove(project.assemble)
-project.build.dependsOn.remove('assemble')
+project.tasks.assemble.enabled = false
 Map<String, String> defaultSubstitutions = [
 /* These match up with the asciidoc syntax for substitutions but
 * the values may differ. In particular {version} needs to resolve
@@ -19,23 +19,19 @@
 package org.elasticsearch.gradle.plugin

 import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
 import nebula.plugin.info.scm.ScmInfoPlugin
 import nebula.plugin.publishing.maven.MavenScmPlugin
 import org.elasticsearch.gradle.BuildPlugin
 import org.elasticsearch.gradle.NoticeTask
 import org.elasticsearch.gradle.test.RestIntegTestTask
 import org.elasticsearch.gradle.test.RunTask
 import org.gradle.api.InvalidUserDataException
 import org.gradle.api.Project
 import org.gradle.api.Task
 import org.gradle.api.XmlProvider
 import org.gradle.api.publish.maven.MavenPublication
 import org.gradle.api.publish.maven.plugins.MavenPublishPlugin
 import org.gradle.api.publish.maven.tasks.GenerateMavenPom
 import org.gradle.api.tasks.SourceSet
 import org.gradle.api.tasks.bundling.Zip
 import org.gradle.jvm.tasks.Jar

 import java.nio.file.Files
 import java.nio.file.Path
 import java.nio.file.StandardCopyOption
 import java.util.regex.Matcher
 import java.util.regex.Pattern
 /**
@@ -55,16 +51,10 @@ public class PluginBuildPlugin extends BuildPlugin {
 String name = project.pluginProperties.extension.name
 project.archivesBaseName = name

 if (project.pluginProperties.extension.hasClientJar) {
 // for plugins which work with the transport client, we copy the jar
 // file to a new name, copy the nebula generated pom to the same name,
 // and generate a different pom for the zip
 addClientJarPomGeneration(project)
 addClientJarTask(project)
 }
 // while the jar isn't normally published, we still at least build a pom of deps
 // in case it is published, for instance when other plugins extend this plugin
 configureJarPom(project)
 // set teh project description so it will be picked up by publishing
 project.description = project.pluginProperties.extension.description

 configurePublishing(project)

 project.integTestCluster.dependsOn(project.bundlePlugin)
 project.tasks.run.dependsOn(project.bundlePlugin)
@@ -94,6 +84,32 @@ public class PluginBuildPlugin extends BuildPlugin {
 project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build
 }

+private void configurePublishing(Project project) {
+  // Only configure publishing if applied externally
+  if (project.pluginProperties.extension.hasClientJar) {
+    project.plugins.apply(MavenScmPlugin.class)
+    // Only change Jar tasks, we don't want a -client zip so we can't change archivesBaseName
+    project.tasks.withType(Jar) {
+      baseName = baseName + "-client"
+    }
+    // always configure publishing for client jars
+    project.plugins.apply(MavenScmPlugin.class)
+    project.publishing.publications.nebula(MavenPublication).artifactId(
+      project.pluginProperties.extension.name + "-client"
+    )
+    project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
+      generatePOMTask.ext.pomFileName = "${project.archivesBaseName}-client-${project.version}.pom"
+    }
+  } else {
+    project.plugins.withType(MavenPublishPlugin).whenPluginAdded {
+      project.publishing.publications.nebula(MavenPublication).artifactId(
+        project.pluginProperties.extension.name
+      )
+    }
+  }
+}

 private static void configureDependencies(Project project) {
 project.dependencies {
 compileOnly "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}"
@@ -141,11 +157,10 @@ public class PluginBuildPlugin extends BuildPlugin {
 from pluginMetadata // metadata (eg custom security policy)
 /*
 * If the plugin is using the shadow plugin then we need to bundle
-* "shadow" things rather than the default jar and dependencies so
-* we don't hit jar hell.
+* that shadow jar.
 */
 from { project.plugins.hasPlugin(ShadowPlugin) ? project.shadowJar : project.jar }
-from { project.plugins.hasPlugin(ShadowPlugin) ? project.configurations.shadow : project.configurations.runtime - project.configurations.compileOnly }
+from project.configurations.runtime - project.configurations.compileOnly
 // extra files for the plugin to go into the zip
 from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging
 from('src/main') {
@@ -161,33 +176,6 @@ public class PluginBuildPlugin extends BuildPlugin {
 }

-/** Adds a task to move jar and associated files to a "-client" name. */
-protected static void addClientJarTask(Project project) {
-  Task clientJar = project.tasks.create('clientJar')
-  clientJar.dependsOn(project.jar, project.tasks.generatePomFileForClientJarPublication, project.javadocJar, project.sourcesJar)
-  clientJar.doFirst {
-    Path jarFile = project.jar.outputs.files.singleFile.toPath()
-    String clientFileName = jarFile.fileName.toString().replace(project.version, "client-${project.version}")
-    Files.copy(jarFile, jarFile.resolveSibling(clientFileName), StandardCopyOption.REPLACE_EXISTING)
-
-    String clientPomFileName = clientFileName.replace('.jar', '.pom')
-    Files.copy(
-      project.tasks.generatePomFileForClientJarPublication.outputs.files.singleFile.toPath(),
-      jarFile.resolveSibling(clientPomFileName),
-      StandardCopyOption.REPLACE_EXISTING
-    )
-
-    String sourcesFileName = jarFile.fileName.toString().replace('.jar', '-sources.jar')
-    String clientSourcesFileName = clientFileName.replace('.jar', '-sources.jar')
-    Files.copy(jarFile.resolveSibling(sourcesFileName), jarFile.resolveSibling(clientSourcesFileName),
-      StandardCopyOption.REPLACE_EXISTING)
-
-    String javadocFileName = jarFile.fileName.toString().replace('.jar', '-javadoc.jar')
-    String clientJavadocFileName = clientFileName.replace('.jar', '-javadoc.jar')
-    Files.copy(jarFile.resolveSibling(javadocFileName), jarFile.resolveSibling(clientJavadocFileName),
-      StandardCopyOption.REPLACE_EXISTING)
-  }
-  project.assemble.dependsOn(clientJar)
-}

 static final Pattern GIT_PATTERN = Pattern.compile(/git@([^:]+):([^\.]+)\.git/)
@@ -209,39 +197,11 @@ public class PluginBuildPlugin extends BuildPlugin {

 /** Adds nebula publishing task to generate a pom file for the plugin. */
 protected static void addClientJarPomGeneration(Project project) {
-  project.plugins.apply(MavenPublishPlugin.class)
-
-  project.publishing {
-    publications {
-      clientJar(MavenPublication) {
-        from project.components.java
-        artifactId = project.pluginProperties.extension.name + '-client'
-        pom.withXml { XmlProvider xml ->
-          Node root = xml.asNode()
-          root.appendNode('name', project.pluginProperties.extension.name)
-          root.appendNode('description', project.pluginProperties.extension.description)
-          root.appendNode('url', urlFromOrigin(project.scminfo.origin))
-          Node scmNode = root.appendNode('scm')
-          scmNode.appendNode('url', project.scminfo.origin)
-        }
-      }
-    }
-  }
+  project.plugins.apply(MavenScmPlugin.class)
+  project.description = project.pluginProperties.extension.description
 }

 /** Configure the pom for the main jar of this plugin */
 protected static void configureJarPom(Project project) {
 project.plugins.apply(ScmInfoPlugin.class)
 project.plugins.apply(MavenPublishPlugin.class)

 project.publishing {
   publications {
     nebula(MavenPublication) {
       artifactId project.pluginProperties.extension.name
     }
   }
 }
 }

 protected void addNoticeGeneration(Project project) {
 File licenseFile = project.pluginProperties.extension.licenseFile
@@ -19,10 +19,11 @@

 package org.elasticsearch.gradle.precommit

+import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
 import org.elasticsearch.gradle.LoggedExec
 import org.gradle.api.file.FileCollection
 import org.gradle.api.tasks.Classpath
 import org.gradle.api.tasks.OutputFile

 /**
 * Runs CheckJarHell on a classpath.
 */
@@ -34,11 +35,18 @@ public class JarHellTask extends LoggedExec {
 * inputs (ie the jars/class files).
 */
 @OutputFile
-File successMarker = new File(project.buildDir, 'markers/jarHell')
+File successMarker

+@Classpath
+FileCollection classpath

 public JarHellTask() {
+  successMarker = new File(project.buildDir, 'markers/jarHell-' + getName())
   project.afterEvaluate {
     FileCollection classpath = project.sourceSets.test.runtimeClasspath
+    if (project.plugins.hasPlugin(ShadowPlugin)) {
+      classpath += project.configurations.bundle
+    }
     inputs.files(classpath)
     dependsOn(classpath)
     description = "Runs CheckJarHell on ${classpath}"
@@ -18,18 +18,12 @@
 */
 package org.elasticsearch.gradle.precommit

-import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
-import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
 import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
-import org.gradle.api.JavaVersion
 import org.gradle.api.Project
 import org.gradle.api.Task
-import org.gradle.api.file.FileCollection
 import org.gradle.api.artifacts.Configuration
 import org.gradle.api.plugins.JavaBasePlugin
 import org.gradle.api.plugins.quality.Checkstyle
-import org.gradle.api.tasks.JavaExec
-import org.gradle.api.tasks.StopExecutionException

 /**
 * Validation tasks which should be run before committing. These run before tests.
 */
@@ -37,20 +31,22 @@ class PrecommitTasks {

 /** Adds a precommit task, which depends on non-test verification tasks. */
 public static Task create(Project project, boolean includeDependencyLicenses) {
+  project.configurations.create("forbiddenApisCliJar")
+  project.dependencies {
+    forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.5')
+  }

   List<Task> precommitTasks = [
-    configureForbiddenApis(project),
     configureCheckstyle(project),
-    configureForbiddenApisCli(project),
     configureNamingConventions(project),
     project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class),
     project.tasks.create('licenseHeaders', LicenseHeadersTask.class),
     project.tasks.create('filepermissions', FilePermissionsTask.class),
-    project.tasks.create('jarHell', JarHellTask.class),
-    project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)
+    configureJarHell(project),
+    configureThirdPartyAudit(project)
   ]

+  // Configure it but don't add it as a dependency yet
+  configureForbiddenApisCli(project)

 // tasks with just tests don't need dependency licenses, so this flag makes adding
 // the task optional
 if (includeDependencyLicenses) {
@@ -84,77 +80,61 @@ class PrecommitTasks {
 return project.tasks.create(precommitOptions)
 }

-private static Task configureForbiddenApis(Project project) {
-  project.pluginManager.apply(ForbiddenApisPlugin.class)
-  project.forbiddenApis {
-    failOnUnsupportedJava = false
-    bundledSignatures = ['jdk-unsafe', 'jdk-deprecated', 'jdk-non-portable', 'jdk-system-out']
-    signaturesURLs = [getClass().getResource('/forbidden/jdk-signatures.txt'),
-                      getClass().getResource('/forbidden/es-all-signatures.txt')]
-    suppressAnnotations = ['**.SuppressForbidden']
+private static Task configureJarHell(Project project) {
+  Task task = project.tasks.create('jarHell', JarHellTask.class)
+  task.classpath = project.sourceSets.test.runtimeClasspath
+  return task
+}
-  project.tasks.withType(CheckForbiddenApis) {
-    // we do not use the += operator to add signatures, as conventionMappings of Gradle do not work when it's configured using withType:
-    if (name.endsWith('Test')) {
-      signaturesURLs = project.forbiddenApis.signaturesURLs +
-        [ getClass().getResource('/forbidden/es-test-signatures.txt'), getClass().getResource('/forbidden/http-signatures.txt') ]
-    } else {
-      signaturesURLs = project.forbiddenApis.signaturesURLs +
-        [ getClass().getResource('/forbidden/es-server-signatures.txt') ]
-    }
-    // forbidden apis doesn't support Java 11, so stop at 10
-    String targetMajorVersion = (project.compilerJavaVersion.compareTo(JavaVersion.VERSION_1_10) > 0 ?
-      JavaVersion.VERSION_1_10 :
-      project.compilerJavaVersion).getMajorVersion()
-    targetCompatibility = Integer.parseInt(targetMajorVersion) >= 9 ?targetMajorVersion : "1.${targetMajorVersion}"
-  }
-  Task forbiddenApis = project.tasks.findByName('forbiddenApis')
-  forbiddenApis.group = "" // clear group, so this does not show up under verification tasks
-
-  return forbiddenApis
+private static Task configureThirdPartyAudit(Project project) {
+  ThirdPartyAuditTask thirdPartyAuditTask = project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)
+  ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')
+  thirdPartyAuditTask.configure {
+    dependsOn(buildResources)
+    signatureFile = buildResources.copy("forbidden/third-party-audit.txt")
+    javaHome = project.runtimeJavaHome
+    targetCompatibility = project.runtimeJavaVersion
+  }
+  return thirdPartyAuditTask
 }

 private static Task configureForbiddenApisCli(Project project) {
-  project.configurations.create("forbiddenApisCliJar")
-  project.dependencies {
-    forbiddenApisCliJar 'de.thetaphi:forbiddenapis:2.5'
-  }
-  Task forbiddenApisCli = project.tasks.create('forbiddenApisCli')
-
-  project.sourceSets.forEach { sourceSet ->
+  Task forbiddenApisCli = project.tasks.create('forbiddenApis')
+  project.sourceSets.all { sourceSet ->
     forbiddenApisCli.dependsOn(
-      project.tasks.create(sourceSet.getTaskName('forbiddenApisCli', null), JavaExec) {
+      project.tasks.create(sourceSet.getTaskName('forbiddenApis', null), ForbiddenApisCliTask) {
         ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')
         dependsOn(buildResources)
-        classpath = project.files(
-          project.configurations.forbiddenApisCliJar,
-          sourceSet.compileClasspath,
-          sourceSet.runtimeClasspath
+        it.sourceSet = sourceSet
+        javaHome = project.runtimeJavaHome
+        targetCompatibility = project.compilerJavaVersion
+        bundledSignatures = [
+          "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out"
+        ]
+        signaturesFiles = project.files(
+          buildResources.copy("forbidden/jdk-signatures.txt"),
+          buildResources.copy("forbidden/es-all-signatures.txt")
         )
-        main = 'de.thetaphi.forbiddenapis.cli.CliMain'
-        executable = "${project.runtimeJavaHome}/bin/java"
-        args "-b", 'jdk-unsafe-1.8'
-        args "-b", 'jdk-deprecated-1.8'
-        args "-b", 'jdk-non-portable'
-        args "-b", 'jdk-system-out'
-        args "-f", buildResources.copy("forbidden/jdk-signatures.txt")
-        args "-f", buildResources.copy("forbidden/es-all-signatures.txt")
-        args "--suppressannotation", '**.SuppressForbidden'
+        suppressAnnotations = ['**.SuppressForbidden']
         if (sourceSet.name == 'test') {
-          args "-f", buildResources.copy("forbidden/es-test-signatures.txt")
-          args "-f", buildResources.copy("forbidden/http-signatures.txt")
+          signaturesFiles += project.files(
+            buildResources.copy("forbidden/es-test-signatures.txt"),
+            buildResources.copy("forbidden/http-signatures.txt")
+          )
         } else {
-          args "-f", buildResources.copy("forbidden/es-server-signatures.txt")
+          signaturesFiles += project.files(buildResources.copy("forbidden/es-server-signatures.txt"))
         }
         dependsOn sourceSet.classesTaskName
-        doFirst {
-          // Forbidden APIs expects only existing dirs, and requires at least one
-          FileCollection existingOutputs = sourceSet.output.classesDirs
-            .filter { it.exists() }
-          if (existingOutputs.isEmpty()) {
-            throw new StopExecutionException("${sourceSet.name} has no outputs")
+        classesDirs = sourceSet.output.classesDirs
+        ext.replaceSignatureFiles = { String... names ->
+          signaturesFiles = project.files(
+            names.collect { buildResources.copy("forbidden/${it}.txt") }
+          )
+        }
-          existingOutputs.forEach { args "-d", it }
+        ext.addSignatureFiles = { String... names ->
+          signaturesFiles += project.files(
+            names.collect { buildResources.copy("forbidden/${it}.txt") }
+          )
+        }
       }
     )
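The `ext.replaceSignatureFiles` and `ext.addSignatureFiles` closures registered above give downstream build scripts a terse way to adjust the signature set per source set; a hypothetical use in a module's build.gradle:

    forbiddenApisMain {
        replaceSignatureFiles 'jdk-signatures', 'es-all-signatures'
        addSignatureFiles 'es-server-signatures'
    }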
@@ -1,291 +0,0 @@
 /*
  * Licensed to Elasticsearch under one or more contributor
  * license agreements. See the NOTICE file distributed with
  * this work for additional information regarding copyright
  * ownership. Elasticsearch licenses this file to you under
  * the Apache License, Version 2.0 (the "License"); you may
  * not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  *    http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
  * KIND, either express or implied. See the License for the
  * specific language governing permissions and limitations
  * under the License.
  */
 package org.elasticsearch.gradle.precommit;

 import org.apache.tools.ant.BuildEvent;
 import org.apache.tools.ant.BuildException;
 import org.apache.tools.ant.BuildListener;
 import org.apache.tools.ant.BuildLogger;
 import org.apache.tools.ant.DefaultLogger;
 import org.apache.tools.ant.Project;
 import org.elasticsearch.gradle.AntTask;
 import org.gradle.api.artifacts.Configuration;
 import org.gradle.api.file.FileCollection;
 import org.gradle.api.tasks.Input
 import org.gradle.api.tasks.InputFiles
 import org.gradle.api.tasks.OutputFile

 import java.nio.file.FileVisitResult;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.SimpleFileVisitor;
 import java.nio.file.attribute.BasicFileAttributes;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

 /**
  * Basic static checking to keep tabs on third party JARs
  */
 public class ThirdPartyAuditTask extends AntTask {

     // patterns for classes to exclude, because we understand their issues
     private List<String> excludes = [];

     /**
      * Input for the task. Set javadoc for {#link getJars} for more. Protected
      * so the afterEvaluate closure in the constructor can write it.
      */
     protected FileCollection jars;

     /**
      * Classpath against which to run the third patty audit. Protected so the
      * afterEvaluate closure in the constructor can write it.
      */
     protected FileCollection classpath;

     /**
      * We use a simple "marker" file that we touch when the task succeeds
      * as the task output. This is compared against the modified time of the
      * inputs (ie the jars/class files).
      */
     @OutputFile
     File successMarker = new File(project.buildDir, 'markers/thirdPartyAudit')

     ThirdPartyAuditTask() {
         // we depend on this because its the only reliable configuration
         // this probably makes the build slower: gradle you suck here when it comes to configurations, you pay the price.
         dependsOn(project.configurations.testCompile);
         description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors'";

         project.afterEvaluate {
             Configuration configuration = project.configurations.findByName('runtime')
             Configuration compileOnly = project.configurations.findByName('compileOnly')
             if (configuration == null) {
                 // some projects apparently do not have 'runtime'? what a nice inconsistency,
                 // basically only serves to waste time in build logic!
                 configuration = project.configurations.findByName('testCompile')
             }
             assert configuration != null
             if (compileOnly == null) {
                 classpath = configuration
             } else {
                 classpath = project.files(configuration, compileOnly)
             }

             // we only want third party dependencies.
             jars = configuration.fileCollection({ dependency ->
                 dependency.group.startsWith("org.elasticsearch") == false
             });

             // we don't want provided dependencies, which we have already scanned. e.g. don't
             // scan ES core's dependencies for every single plugin
             if (compileOnly != null) {
                 jars -= compileOnly
             }
             inputs.files(jars)
             onlyIf { jars.isEmpty() == false }
         }
     }

     /**
      * classes that should be excluded from the scan,
      * e.g. because we know what sheisty stuff those particular classes are up to.
      */
     public void setExcludes(String[] classes) {
         for (String s : classes) {
             if (s.indexOf('*') != -1) {
                 throw new IllegalArgumentException("illegal third party audit exclusion: '" + s + "', wildcards are not permitted!");
             }
         }
         excludes = classes.sort();
     }

     /**
      * Returns current list of exclusions.
      */
     @Input
     public List<String> getExcludes() {
         return excludes;
     }

     // yes, we parse Uwe Schindler's errors to find missing classes, and to keep a continuous audit. Just don't let him know!
     static final Pattern MISSING_CLASS_PATTERN =
         Pattern.compile(/WARNING: The referenced class '(.*)' cannot be loaded\. Please fix the classpath\!/);

     static final Pattern VIOLATION_PATTERN =
         Pattern.compile(/\s\sin ([a-zA-Z0-9\$\.]+) \(.*\)/);

     // we log everything and capture errors and handle them with our whitelist
     // this is important, as we detect stale whitelist entries, workaround forbidden apis bugs,
     // and it also allows whitelisting missing classes!
     static class EvilLogger extends DefaultLogger {
         final Set<String> missingClasses = new TreeSet<>();
         final Map<String,List<String>> violations = new TreeMap<>();
         String previousLine = null;

         @Override
         public void messageLogged(BuildEvent event) {
             if (event.getTask().getClass() == de.thetaphi.forbiddenapis.ant.AntTask.class) {
                 if (event.getPriority() == Project.MSG_WARN) {
                     Matcher m = MISSING_CLASS_PATTERN.matcher(event.getMessage());
                     if (m.matches()) {
                         missingClasses.add(m.group(1).replace('.', '/') + ".class");
                     }

                     // Reset the priority of the event to DEBUG, so it doesn't
                     // pollute the build output
                     event.setMessage(event.getMessage(), Project.MSG_DEBUG);
                 } else if (event.getPriority() == Project.MSG_ERR) {
                     Matcher m = VIOLATION_PATTERN.matcher(event.getMessage());
                     if (m.matches()) {
                         String violation = previousLine + '\n' + event.getMessage();
                         String clazz = m.group(1).replace('.', '/') + ".class";
                         List<String> current = violations.get(clazz);
                         if (current == null) {
                             current = new ArrayList<>();
                             violations.put(clazz, current);
                         }
                         current.add(violation);
                     }
                     previousLine = event.getMessage();
                 }
             }
             super.messageLogged(event);
         }
     }

     @Override
     protected BuildLogger makeLogger(PrintStream stream, int outputLevel) {
         DefaultLogger log = new EvilLogger();
         log.errorPrintStream = stream;
         log.outputPrintStream = stream;
         log.messageOutputLevel = outputLevel;
         return log;
     }

     @Override
     protected void runAnt(AntBuilder ant) {
         ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask);

         // print which jars we are going to scan, always
         // this is not the time to try to be succinct! Forbidden will print plenty on its own!
         Set<String> names = new TreeSet<>();
         for (File jar : jars) {
             names.add(jar.getName());
         }

         // TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first,
         // and then remove our temp dir afterwards. don't complain: try it yourself.
         // we don't use gradle temp dir handling, just google it, or try it yourself.

         File tmpDir = new File(project.buildDir, 'tmp/thirdPartyAudit');

         // clean up any previous mess (if we failed), then unzip everything to one directory
         ant.delete(dir: tmpDir.getAbsolutePath());
         tmpDir.mkdirs();
         for (File jar : jars) {
             ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath());
         }

         // convert exclusion class names to binary file names
         List<String> excludedFiles = excludes.collect {it.replace('.', '/') + ".class"}
         Set<String> excludedSet = new TreeSet<>(excludedFiles);

         // jarHellReprise
         Set<String> sheistySet = getSheistyClasses(tmpDir.toPath());

         try {
             ant.thirdPartyAudit(failOnUnsupportedJava: false,
                 failOnMissingClasses: false,
                 classpath: classpath.asPath) {
                 fileset(dir: tmpDir)
                 signatures {
                     string(value: getClass().getResourceAsStream('/forbidden/third-party-audit.txt').getText('UTF-8'))
                 }
             }
         } catch (BuildException ignore) {}

         EvilLogger evilLogger = null;
         for (BuildListener listener : ant.project.getBuildListeners()) {
             if (listener instanceof EvilLogger) {
                 evilLogger = (EvilLogger) listener;
                 break;
             }
         }
         assert evilLogger != null;

         // keep our whitelist up to date
         Set<String> bogusExclusions = new TreeSet<>(excludedSet);
         bogusExclusions.removeAll(sheistySet);
         bogusExclusions.removeAll(evilLogger.missingClasses);
         bogusExclusions.removeAll(evilLogger.violations.keySet());
         if (!bogusExclusions.isEmpty()) {
             throw new IllegalStateException("Invalid exclusions, nothing is wrong with these classes: " + bogusExclusions);
         }

         // don't duplicate classes with the JDK
         sheistySet.removeAll(excludedSet);
         if (!sheistySet.isEmpty()) {
             throw new IllegalStateException("JAR HELL WITH JDK! " + sheistySet);
         }

         // don't allow a broken classpath
         evilLogger.missingClasses.removeAll(excludedSet);
         if (!evilLogger.missingClasses.isEmpty()) {
             throw new IllegalStateException("CLASSES ARE MISSING! " + evilLogger.missingClasses);
         }

         // don't use internal classes
         evilLogger.violations.keySet().removeAll(excludedSet);
         if (!evilLogger.violations.isEmpty()) {
             throw new IllegalStateException("VIOLATIONS WERE FOUND! " + evilLogger.violations);
         }

         // clean up our mess (if we succeed)
         ant.delete(dir: tmpDir.getAbsolutePath());

         successMarker.setText("", 'UTF-8')
     }

     /**
      * check for sheisty classes: if they also exist in the extensions classloader, its jar hell with the jdk!
      */
     private Set<String> getSheistyClasses(Path root) {
         // system.parent = extensions loader.
         // note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!).
         // but groovy/gradle needs to work at all first!
         ClassLoader ext = ClassLoader.getSystemClassLoader().getParent();
         assert ext != null;

         Set<String> sheistySet = new TreeSet<>();
         Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
             @Override
             public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                 String entry = root.relativize(file).toString().replace('\\', '/');
                 if (entry.endsWith(".class")) {
                     if (ext.getResource(entry) != null) {
                         sheistySet.add(entry);
                     }
                 }
                 return FileVisitResult.CONTINUE;
             }
         });
         return sheistySet;
     }
 }
@@ -337,7 +337,13 @@ class ClusterFormationTasks {
 if (node.nodeVersion.major >= 7) {
   esConfig['indices.breaker.total.use_real_memory'] = false
 }
-esConfig.putAll(node.config.settings)
+for (Map.Entry<String, Object> setting : node.config.settings) {
+  if (setting.value == null) {
+    esConfig.remove(setting.key)
+  } else {
+    esConfig.put(setting.key, setting.value)
+  }
+}

 Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)
 writeConfig.doFirst {
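Because null values now remove keys instead of being written out, a test cluster definition can unset one of the defaults above rather than merely overriding it; a hypothetical use:

    integTestCluster {
        setting 'indices.breaker.total.use_real_memory', null // strips the default from the generated config
    }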
@@ -53,6 +53,8 @@ public class StandaloneRestTestPlugin implements Plugin<Project> {

 // only setup tests to build
 project.sourceSets.create('test')
+// create a compileOnly configuration as others might expect it
+project.configurations.create("compileOnly")
 project.dependencies.add('testCompile', "org.elasticsearch.test:framework:${VersionProperties.elasticsearch}")

 project.eclipse.classpath.sourceSets = [project.sourceSets.test]
@@ -0,0 +1,68 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch;

import org.gradle.api.Action;
import org.gradle.api.Project;
import org.gradle.api.file.CopySpec;
import org.gradle.api.file.FileTree;
import org.gradle.api.tasks.WorkResult;
import org.gradle.process.ExecResult;
import org.gradle.process.JavaExecSpec;

import java.io.File;

/**
 * Facilitate access to Gradle services without a direct dependency on Project.
 *
 * In a future release Gradle will offer service injection; this adapter plays that role until that time.
 * It exposes the service methods that are part of the public API, as the classes implementing them are not.
 * Today service injection is <a href="https://github.com/gradle/gradle/issues/2363">not available</a> for
 * extensions.
 *
 * Everything exposed here must be thread safe. That is the very reason why project is not passed in directly.
 */
public class GradleServicesAdapter {

    public final Project project;

    public GradleServicesAdapter(Project project) {
        this.project = project;
    }

    public static GradleServicesAdapter getInstance(Project project) {
        return new GradleServicesAdapter(project);
    }

    public WorkResult copy(Action<? super CopySpec> action) {
        return project.copy(action);
    }

    public WorkResult sync(Action<? super CopySpec> action) {
        return project.sync(action);
    }

    public ExecResult javaexec(Action<? super JavaExecSpec> action) {
        return project.javaexec(action);
    }

    public FileTree zipTree(File zipPath) {
        return project.zipTree(zipPath);
    }
}
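A plugin or extension can hold on to the adapter instead of the `Project` itself and still reach the copy, sync, exec and archive services. A usage sketch under those assumptions; the plugin class, task name and archive path below are hypothetical, not part of the build:

import org.elasticsearch.GradleServicesAdapter;
import org.gradle.api.Plugin;
import org.gradle.api.Project;

import java.io.File;

// Hypothetical plugin that unpacks an archive through the adapter rather than
// by calling the service methods on Project directly.
public class UnpackDemoPlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        GradleServicesAdapter services = GradleServicesAdapter.getInstance(project);
        project.getTasks().create("unpackDemo", task ->
            task.doLast(t ->
                services.copy(spec -> {
                    // copy the contents of demo.zip into build/demo
                    spec.from(services.zipTree(new File(project.getProjectDir(), "demo.zip")));
                    spec.into(new File(project.getBuildDir(), "demo"));
                })
            )
        );
    }
}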
@@ -0,0 +1,36 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

public enum Distribution {

    INTEG_TEST("integ-test-zip"),
    ZIP("zip"),
    ZIP_OSS("zip-oss");

    private final String name;

    Distribution(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }
}
@@ -35,6 +35,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

@@ -105,7 +106,7 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
if (is == null) {
    throw new GradleException("Can't export `" + resourcePath + "` from build-tools: not found");
}
Files.copy(is, destination);
Files.copy(is, destination, StandardCopyOption.REPLACE_EXISTING);
} catch (IOException e) {
    throw new GradleException("Can't write resource `" + resourcePath + "` to " + destination, e);
}
@@ -0,0 +1,81 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class JdkJarHellCheck {

    private Set<String> detected = new HashSet<>();

    private void scanForJDKJarHell(Path root) throws IOException {
        // system.parent = extensions loader.
        // note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!)
        ClassLoader ext = ClassLoader.getSystemClassLoader().getParent();
        assert ext != null;

        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                String entry = root.relativize(file).toString().replace('\\', '/');
                if (entry.endsWith(".class")) {
                    if (ext.getResource(entry) != null) {
                        detected.add(
                            entry
                                .replace("/", ".")
                                .replace(".class", "")
                        );
                    }
                }
                return FileVisitResult.CONTINUE;
            }
        });
    }

    public Set<String> getDetected() {
        return Collections.unmodifiableSet(detected);
    }

    public static void main(String[] argv) throws IOException {
        JdkJarHellCheck checker = new JdkJarHellCheck();
        for (String location : argv) {
            Path path = Paths.get(location);
            if (Files.exists(path) == false) {
                throw new IllegalArgumentException("Path does not exist: " + path);
            }
            checker.scanForJDKJarHell(path);
        }
        if (checker.getDetected().isEmpty()) {
            System.exit(0);
        } else {
            checker.getDetected().forEach(System.out::println);
            System.exit(1);
        }
    }

}
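The checker is designed to be forked as a separate JVM with the extracted third-party classes on its classpath: exit code 0 means no class shadows one visible to the extensions class loader, and exit code 1 means each clashing class name was printed on its own line. A hypothetical direct invocation; the directory is made up, and the build normally forks this through javaexec instead:

import org.elasticsearch.gradle.JdkJarHellCheck;

public class JdkJarHellCheckDemo {
    public static void main(String[] args) throws Exception {
        // Scan a directory of extracted .class files; JdkJarHellCheck prints any
        // class that also resolves via the extensions class loader and then
        // exits with 0 (clean) or 1 (clashes found).
        JdkJarHellCheck.main(new String[] { "build/precommit/thirdPartyAudit/demo" });
    }
}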
@@ -0,0 +1,110 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.clusterformation;

import groovy.lang.Closure;
import org.elasticsearch.GradleServicesAdapter;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.execution.TaskActionListener;
import org.gradle.api.execution.TaskExecutionListener;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.tasks.TaskState;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ClusterformationPlugin implements Plugin<Project> {

    public static final String LIST_TASK_NAME = "listElasticSearchClusters";
    public static final String EXTENSION_NAME = "elasticSearchClusters";

    private final Logger logger = Logging.getLogger(ClusterformationPlugin.class);

    @Override
    public void apply(Project project) {
        NamedDomainObjectContainer<? extends ElasticsearchConfiguration> container = project.container(
            ElasticsearchNode.class,
            (name) -> new ElasticsearchNode(name, GradleServicesAdapter.getInstance(project))
        );
        project.getExtensions().add(EXTENSION_NAME, container);

        Task listTask = project.getTasks().create(LIST_TASK_NAME);
        listTask.setGroup("ES cluster formation");
        listTask.setDescription("Lists all ES clusters configured for this project");
        listTask.doLast((Task task) ->
            container.forEach((ElasticsearchConfiguration cluster) ->
                logger.lifecycle(" * {}: {}", cluster.getName(), cluster.getDistribution())
            )
        );

        Map<Task, List<ElasticsearchConfiguration>> taskToCluster = new HashMap<>();

        // register an extension for all current and future tasks, so that any task can declare that it wants to use a
        // specific cluster.
        project.getTasks().all((Task task) ->
            task.getExtensions().findByType(ExtraPropertiesExtension.class)
                .set(
                    "useCluster",
                    new Closure<Void>(this, this) {
                        public void doCall(ElasticsearchConfiguration conf) {
                            taskToCluster.computeIfAbsent(task, k -> new ArrayList<>()).add(conf);
                        }
                    })
        );

        project.getGradle().getTaskGraph().whenReady(taskExecutionGraph ->
            taskExecutionGraph.getAllTasks()
                .forEach(task ->
                    taskToCluster.getOrDefault(task, Collections.emptyList()).forEach(ElasticsearchConfiguration::claim)
                )
        );
        project.getGradle().addListener(
            new TaskActionListener() {
                @Override
                public void beforeActions(Task task) {
                    // we only start the cluster before the actions, so we'll not start it if the task is up-to-date
                    taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchConfiguration::start);
                }
                @Override
                public void afterActions(Task task) {}
            }
        );
        project.getGradle().addListener(
            new TaskExecutionListener() {
                @Override
                public void afterExecute(Task task, TaskState state) {
                    // always un-claim the cluster, even if _this_ task is up-to-date, as others might not have been and caused the
                    // cluster to start.
                    taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchConfiguration::unClaimAndStop);
                }
                @Override
                public void beforeExecute(Task task) {}
            }
        );
    }

}
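From a consumer's point of view the plugin contributes the `elasticSearchClusters` container and the `listElasticSearchClusters` task; build scripts configure nodes in the container and mark tasks with the `useCluster` extra property. A programmatic sketch of the same wiring, written against the Gradle API rather than a build script; the cluster name and version are made up for illustration:

import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.clusterformation.ClusterformationPlugin;
import org.elasticsearch.gradle.clusterformation.ElasticsearchNode;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;

public class ClusterformationDemo {
    @SuppressWarnings("unchecked")
    public static void configure(Project project) {
        project.getPlugins().apply(ClusterformationPlugin.class);
        // fetch the container the plugin registered under EXTENSION_NAME
        NamedDomainObjectContainer<ElasticsearchNode> clusters =
            (NamedDomainObjectContainer<ElasticsearchNode>) project.getExtensions()
                .getByName(ClusterformationPlugin.EXTENSION_NAME);
        ElasticsearchNode cluster = clusters.create("myTestCluster");
        cluster.setDistribution(Distribution.INTEG_TEST);
        cluster.setVersion("6.4.0"); // hypothetical version string
    }
}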
@@ -0,0 +1,46 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.clusterformation;

import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.Version;

import java.util.concurrent.Future;

public interface ElasticsearchConfiguration {
    String getName();

    Version getVersion();

    void setVersion(Version version);

    default void setVersion(String version) {
        setVersion(Version.fromString(version));
    }

    Distribution getDistribution();

    void setDistribution(Distribution distribution);

    void claim();

    Future<Void> start();

    void unClaimAndStop();
}
@@ -0,0 +1,130 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.clusterformation;

import org.elasticsearch.GradleServicesAdapter;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.Version;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;

import java.util.Objects;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

public class ElasticsearchNode implements ElasticsearchConfiguration {

    private final String name;
    private final GradleServicesAdapter services;
    private final AtomicInteger noOfClaims = new AtomicInteger();
    private final AtomicBoolean started = new AtomicBoolean(false);
    private final Logger logger = Logging.getLogger(ElasticsearchNode.class);

    private Distribution distribution;
    private Version version;

    public ElasticsearchNode(String name, GradleServicesAdapter services) {
        this.name = name;
        this.services = services;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public Version getVersion() {
        return version;
    }

    @Override
    public void setVersion(Version version) {
        checkNotRunning();
        this.version = version;
    }

    @Override
    public Distribution getDistribution() {
        return distribution;
    }

    @Override
    public void setDistribution(Distribution distribution) {
        checkNotRunning();
        this.distribution = distribution;
    }

    @Override
    public void claim() {
        noOfClaims.incrementAndGet();
    }

    /**
     * Start the cluster if not running. Does nothing if the cluster is already running.
     *
     * @return future of thread running in the background
     */
    @Override
    public Future<Void> start() {
        if (started.getAndSet(true)) {
            logger.lifecycle("Already started cluster: {}", name);
        } else {
            logger.lifecycle("Starting cluster: {}", name);
        }
        return null;
    }

    /**
     * Stops a running cluster if it's not claimed. Does nothing otherwise.
     */
    @Override
    public void unClaimAndStop() {
        int decrementedClaims = noOfClaims.decrementAndGet();
        if (decrementedClaims > 0) {
            logger.lifecycle("Not stopping {}, since cluster still has {} claim(s)", name, decrementedClaims);
            return;
        }
        if (started.get() == false) {
            logger.lifecycle("Asked to unClaimAndStop, but cluster was not running: {}", name);
            return;
        }
        logger.lifecycle("Stopping {}, number of claims is {}", name, decrementedClaims);
    }

    private void checkNotRunning() {
        if (started.get()) {
            throw new IllegalStateException("Configuration cannot be altered while running");
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ElasticsearchNode that = (ElasticsearchNode) o;
        return Objects.equals(name, that.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name);
    }
}
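Claim counting is what lets several tasks share one node: every task that declared the cluster claims it when the task graph is ready, the first task to actually execute starts it, and each finished task unclaims it, so the node stops only when the last claim is released. A minimal sketch of that sequence against this logging-only implementation, run outside Gradle, so the services adapter is not needed and is passed as null:

import org.elasticsearch.gradle.clusterformation.ElasticsearchNode;

public class ClaimLifecycleDemo {
    public static void main(String[] args) {
        // the GradleServicesAdapter is unused by the current logging-only start()
        ElasticsearchNode node = new ElasticsearchNode("myTestCluster", null);
        node.claim();          // task :user1 declared the cluster
        node.claim();          // task :user2 declared the cluster
        node.start();          // logs "Starting cluster: myTestCluster"
        node.start();          // logs "Already started cluster: myTestCluster"
        node.unClaimAndStop(); // logs "Not stopping myTestCluster, since cluster still has 1 claim(s)"
        node.unClaimAndStop(); // logs "Stopping myTestCluster, number of claims is 0"
    }
}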
@@ -0,0 +1,190 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.precommit;

import org.gradle.api.DefaultTask;
import org.gradle.api.JavaVersion;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.OutputFile;
import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.TaskAction;
import org.gradle.process.JavaExecSpec;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class ForbiddenApisCliTask extends DefaultTask {

    private final Logger logger = Logging.getLogger(ForbiddenApisCliTask.class);
    private FileCollection signaturesFiles;
    private List<String> signatures = new ArrayList<>();
    private Set<String> bundledSignatures = new LinkedHashSet<>();
    private Set<String> suppressAnnotations = new LinkedHashSet<>();
    private JavaVersion targetCompatibility;
    private FileCollection classesDirs;
    private SourceSet sourceSet;
    // This needs to be an object so it can hold Groovy GStrings
    private Object javaHome;

    @Input
    public JavaVersion getTargetCompatibility() {
        return targetCompatibility;
    }

    public void setTargetCompatibility(JavaVersion targetCompatibility) {
        if (targetCompatibility.compareTo(JavaVersion.VERSION_1_10) > 0) {
            logger.warn(
                "Target compatibility is set to {} but forbiddenapis only supports up to 10. Will cap at 10.",
                targetCompatibility
            );
            this.targetCompatibility = JavaVersion.VERSION_1_10;
        } else {
            this.targetCompatibility = targetCompatibility;
        }
    }

    @OutputFile
    public File getMarkerFile() {
        return new File(
            new File(getProject().getBuildDir(), "precommit"),
            getName()
        );
    }

    @InputFiles
    @SkipWhenEmpty
    public FileCollection getClassesDirs() {
        return classesDirs.filter(File::exists);
    }

    public void setClassesDirs(FileCollection classesDirs) {
        this.classesDirs = classesDirs;
    }

    @InputFiles
    public FileCollection getSignaturesFiles() {
        return signaturesFiles;
    }

    public void setSignaturesFiles(FileCollection signaturesFiles) {
        this.signaturesFiles = signaturesFiles;
    }

    @Input
    public List<String> getSignatures() {
        return signatures;
    }

    public void setSignatures(List<String> signatures) {
        this.signatures = signatures;
    }

    @Input
    public Set<String> getBundledSignatures() {
        return bundledSignatures;
    }

    public void setBundledSignatures(Set<String> bundledSignatures) {
        this.bundledSignatures = bundledSignatures;
    }

    @Input
    public Set<String> getSuppressAnnotations() {
        return suppressAnnotations;
    }

    public void setSuppressAnnotations(Set<String> suppressAnnotations) {
        this.suppressAnnotations = suppressAnnotations;
    }

    @InputFiles
    public FileCollection getClassPathFromSourceSet() {
        return getProject().files(
            sourceSet.getCompileClasspath(),
            sourceSet.getRuntimeClasspath()
        );
    }

    public void setSourceSet(SourceSet sourceSet) {
        this.sourceSet = sourceSet;
    }

    @InputFiles
    public Configuration getForbiddenAPIsConfiguration() {
        return getProject().getConfigurations().getByName("forbiddenApisCliJar");
    }

    @Input
    public Object getJavaHome() {
        return javaHome;
    }

    public void setJavaHome(Object javaHome) {
        this.javaHome = javaHome;
    }

    @TaskAction
    public void runForbiddenApisAndWriteMarker() throws IOException {
        getProject().javaexec((JavaExecSpec spec) -> {
            spec.classpath(
                getForbiddenAPIsConfiguration(),
                getClassPathFromSourceSet()
            );
            spec.setExecutable(getJavaHome() + "/bin/java");
            spec.setMain("de.thetaphi.forbiddenapis.cli.CliMain");
            // build the command line
            getSignaturesFiles().forEach(file -> spec.args("-f", file.getAbsolutePath()));
            getSuppressAnnotations().forEach(annotation -> spec.args("--suppressannotation", annotation));
            getBundledSignatures().forEach(bundled -> {
                // there's no option for target compatibility so we have to interpret it
                final String prefix;
                if (bundled.equals("jdk-system-out") ||
                    bundled.equals("jdk-reflection") ||
                    bundled.equals("jdk-non-portable")) {
                    prefix = "";
                } else {
                    prefix = "-" + (
                        getTargetCompatibility().compareTo(JavaVersion.VERSION_1_9) >= 0 ?
                            getTargetCompatibility().getMajorVersion() :
                            "1." + getTargetCompatibility().getMajorVersion()
                    );
                }
                spec.args("-b", bundled + prefix);
            });
            getClassesDirs().forEach(dir ->
                spec.args("-d", dir)
            );
        });
        Files.write(getMarkerFile().toPath(), Collections.emptyList());
    }

}
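Because the CLI has no separate option for target compatibility, the task encodes it into the bundled signature name: version-independent sets such as `jdk-non-portable` pass through unchanged, while versioned sets get a `-1.8` style suffix below Java 9 and a plain major version from Java 9 on. A standalone sketch of that mapping, mirroring the prefix logic above:

import org.gradle.api.JavaVersion;

public class BundledSignatureDemo {
    // mirrors the prefix interpretation in ForbiddenApisCliTask#runForbiddenApisAndWriteMarker
    static String cliName(String bundled, JavaVersion target) {
        if (bundled.equals("jdk-system-out")
            || bundled.equals("jdk-reflection")
            || bundled.equals("jdk-non-portable")) {
            return bundled; // these signature sets are not versioned
        }
        String version = target.compareTo(JavaVersion.VERSION_1_9) >= 0
            ? target.getMajorVersion()           // "9", "10", ...
            : "1." + target.getMajorVersion();   // "1.7", "1.8"
        return bundled + "-" + version;
    }

    public static void main(String[] args) {
        System.out.println(cliName("jdk-unsafe", JavaVersion.VERSION_1_8));        // jdk-unsafe-1.8
        System.out.println(cliName("jdk-unsafe", JavaVersion.VERSION_1_10));       // jdk-unsafe-10
        System.out.println(cliName("jdk-non-portable", JavaVersion.VERSION_1_10)); // jdk-non-portable
    }
}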
@@ -0,0 +1,309 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.precommit;

import org.apache.commons.io.output.NullOutputStream;
import org.elasticsearch.gradle.JdkJarHellCheck;
import org.elasticsearch.test.NamingConventionsCheck;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.JavaVersion;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.StopExecutionException;
import org.gradle.api.tasks.TaskAction;
import org.gradle.process.ExecResult;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class ThirdPartyAuditTask extends DefaultTask {

    private static final Pattern MISSING_CLASS_PATTERN = Pattern.compile(
        "WARNING: The referenced class '(.*)' cannot be loaded\\. Please fix the classpath!"
    );

    private static final Pattern VIOLATION_PATTERN = Pattern.compile(
        "\\s\\sin ([a-zA-Z0-9$.]+) \\(.*\\)"
    );

    /**
     * patterns for classes to exclude, because we understand their issues
     */
    private Set<String> excludes = new TreeSet<>();

    private File signatureFile;

    private String javaHome;

    private JavaVersion targetCompatibility;

    @Input
    public JavaVersion getTargetCompatibility() {
        return targetCompatibility;
    }

    public void setTargetCompatibility(JavaVersion targetCompatibility) {
        this.targetCompatibility = targetCompatibility;
    }

    @InputFiles
    public Configuration getForbiddenAPIsConfiguration() {
        return getProject().getConfigurations().getByName("forbiddenApisCliJar");
    }

    @InputFile
    public File getSignatureFile() {
        return signatureFile;
    }

    public void setSignatureFile(File signatureFile) {
        this.signatureFile = signatureFile;
    }

    @InputFiles
    public Configuration getRuntimeConfiguration() {
        Configuration runtime = getProject().getConfigurations().findByName("runtime");
        if (runtime == null) {
            return getProject().getConfigurations().getByName("testCompile");
        }
        return runtime;
    }

    @Input
    public String getJavaHome() {
        return javaHome;
    }

    public void setJavaHome(String javaHome) {
        this.javaHome = javaHome;
    }

    @InputFiles
    public Configuration getCompileOnlyConfiguration() {
        return getProject().getConfigurations().getByName("compileOnly");
    }

    @OutputDirectory
    public File getJarExpandDir() {
        return new File(
            new File(getProject().getBuildDir(), "precommit/thirdPartyAudit"),
            getName()
        );
    }

    public void setExcludes(String... classes) {
        excludes.clear();
        for (String each : classes) {
            if (each.indexOf('*') != -1) {
                throw new IllegalArgumentException("illegal third party audit exclusion: '" + each + "', wildcards are not permitted!");
            }
            excludes.add(each);
        }
    }

    @Input
    public Set<String> getExcludes() {
        return Collections.unmodifiableSet(excludes);
    }

    @TaskAction
    public void runThirdPartyAudit() throws IOException {
        FileCollection jars = getJarsToScan();

        extractJars(jars);

        final String forbiddenApisOutput = runForbiddenAPIsCli();

        final Set<String> missingClasses = new TreeSet<>();
        Matcher missingMatcher = MISSING_CLASS_PATTERN.matcher(forbiddenApisOutput);
        while (missingMatcher.find()) {
            missingClasses.add(missingMatcher.group(1));
        }

        final Set<String> violationsClasses = new TreeSet<>();
        Matcher violationMatcher = VIOLATION_PATTERN.matcher(forbiddenApisOutput);
        while (violationMatcher.find()) {
            violationsClasses.add(violationMatcher.group(1));
        }

        Set<String> jdkJarHellClasses = runJdkJarHellCheck();

        assertNoPointlessExclusions(missingClasses, violationsClasses, jdkJarHellClasses);

        assertNoMissingAndViolations(missingClasses, violationsClasses);

        assertNoJarHell(jdkJarHellClasses);
    }

    private void extractJars(FileCollection jars) {
        File jarExpandDir = getJarExpandDir();
        // We need to clean up to make sure old dependencies don't linger
        getProject().delete(jarExpandDir);
        jars.forEach(jar ->
            getProject().copy(spec -> {
                spec.from(getProject().zipTree(jar));
                spec.into(jarExpandDir);
                // Exclude classes for multi release jars above target
                for (int i = Integer.parseInt(targetCompatibility.getMajorVersion()) + 1;
                     i <= Integer.parseInt(JavaVersion.VERSION_HIGHER.getMajorVersion());
                     i++
                ) {
                    spec.exclude("META-INF/versions/" + i + "/**");
                }
            })
        );
    }

    private void assertNoJarHell(Set<String> jdkJarHellClasses) {
        jdkJarHellClasses.removeAll(excludes);
        if (jdkJarHellClasses.isEmpty() == false) {
            throw new IllegalStateException("Jar Hell with the JDK:" + formatClassList(jdkJarHellClasses));
        }
    }

    private void assertNoMissingAndViolations(Set<String> missingClasses, Set<String> violationsClasses) {
        missingClasses.removeAll(excludes);
        violationsClasses.removeAll(excludes);
        String missingText = formatClassList(missingClasses);
        String violationsText = formatClassList(violationsClasses);
        if (missingText.isEmpty() && violationsText.isEmpty()) {
            getLogger().info("Third party audit passed successfully");
        } else {
            throw new IllegalStateException(
                "Audit of third party dependencies failed:\n" +
                    (missingText.isEmpty() ? "" : "Missing classes:\n" + missingText) +
                    (violationsText.isEmpty() ? "" : "Classes with violations:\n" + violationsText)
            );
        }
    }

    private void assertNoPointlessExclusions(Set<String> missingClasses, Set<String> violationsClasses, Set<String> jdkJarHellClasses) {
        // keep our whitelist up to date
        Set<String> bogusExclusions = new TreeSet<>(excludes);
        bogusExclusions.removeAll(missingClasses);
        bogusExclusions.removeAll(jdkJarHellClasses);
        bogusExclusions.removeAll(violationsClasses);
        if (bogusExclusions.isEmpty() == false) {
            throw new IllegalStateException(
                "Invalid exclusions, nothing is wrong with these classes: " + formatClassList(bogusExclusions)
            );
        }
    }

    private String runForbiddenAPIsCli() throws IOException {
        ByteArrayOutputStream errorOut = new ByteArrayOutputStream();
        getProject().javaexec(spec -> {
            spec.setExecutable(javaHome + "/bin/java");
            spec.classpath(
                getForbiddenAPIsConfiguration(),
                getRuntimeConfiguration(),
                getCompileOnlyConfiguration()
            );
            spec.setMain("de.thetaphi.forbiddenapis.cli.CliMain");
            spec.args(
                "-f", getSignatureFile().getAbsolutePath(),
                "-d", getJarExpandDir(),
                "--allowmissingclasses"
            );
            spec.setErrorOutput(errorOut);
            if (getLogger().isInfoEnabled() == false) {
                spec.setStandardOutput(new NullOutputStream());
            }
            spec.setIgnoreExitValue(true);
        });
        final String forbiddenApisOutput;
        try (ByteArrayOutputStream outputStream = errorOut) {
            forbiddenApisOutput = outputStream.toString(StandardCharsets.UTF_8.name());
        }
        if (getLogger().isInfoEnabled()) {
            getLogger().info(forbiddenApisOutput);
        }
        return forbiddenApisOutput;
    }

    private FileCollection getJarsToScan() {
        FileCollection jars = getRuntimeConfiguration()
            .fileCollection(dep -> dep.getGroup().startsWith("org.elasticsearch") == false);
        Configuration compileOnlyConfiguration = getCompileOnlyConfiguration();
        // don't scan provided dependencies that we already scanned, e.g. don't scan core's dependencies for every plugin
        if (compileOnlyConfiguration != null) {
            jars = jars.minus(compileOnlyConfiguration);
        }
        if (jars.isEmpty()) {
            throw new StopExecutionException("No jars to scan");
        }
        return jars;
    }

    private String formatClassList(Set<String> classList) {
        return classList.stream()
            .map(name -> " * " + name)
            .collect(Collectors.joining("\n"));
    }

    private Set<String> runJdkJarHellCheck() throws IOException {
        ByteArrayOutputStream standardOut = new ByteArrayOutputStream();
        ExecResult execResult = getProject().javaexec(spec -> {
            URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation();
            if (location.getProtocol().equals("file") == false) {
                throw new GradleException("Unexpected location for NamingConventionCheck class: " + location);
            }
            try {
                spec.classpath(
                    location.toURI().getPath(),
                    getRuntimeConfiguration(),
                    getCompileOnlyConfiguration()
                );
            } catch (URISyntaxException e) {
                throw new AssertionError(e);
            }
            spec.setMain(JdkJarHellCheck.class.getName());
            spec.args(getJarExpandDir());
            spec.setIgnoreExitValue(true);
            spec.setExecutable(javaHome + "/bin/java");
            spec.setStandardOutput(standardOut);
        });
        if (execResult.getExitValue() == 0) {
            return Collections.emptySet();
        }
        final String jdkJarHellCheckList;
        try (ByteArrayOutputStream outputStream = standardOut) {
            jdkJarHellCheckList = outputStream.toString(StandardCharsets.UTF_8.name());
        }
        return new TreeSet<>(Arrays.asList(jdkJarHellCheckList.split("\\r?\\n")));
    }

}
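The audit has no structured report to consume, so it scrapes the forbidden-apis CLI's error stream: `MISSING_CLASS_PATTERN` collects classes the CLI could not load and `VIOLATION_PATTERN` collects the class named in each violation line. A small sketch against made-up output; the sample strings only imitate the CLI's format and are not captured from a real run:

import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class AuditOutputParsingDemo {
    private static final Pattern MISSING_CLASS_PATTERN = Pattern.compile(
        "WARNING: The referenced class '(.*)' cannot be loaded\\. Please fix the classpath!"
    );
    private static final Pattern VIOLATION_PATTERN = Pattern.compile(
        "\\s\\sin ([a-zA-Z0-9$.]+) \\(.*\\)"
    );

    public static void main(String[] args) {
        // illustrative output; the real content comes from the forked CLI's stderr
        String output =
            "WARNING: The referenced class 'com.example.Gone' cannot be loaded. Please fix the classpath!\n" +
            "  in com.example.BadCaller (BadCaller.java:42)\n";

        Set<String> missing = new TreeSet<>();
        Matcher missingMatcher = MISSING_CLASS_PATTERN.matcher(output);
        while (missingMatcher.find()) {
            missing.add(missingMatcher.group(1));
        }
        Set<String> violations = new TreeSet<>();
        Matcher violationMatcher = VIOLATION_PATTERN.matcher(output);
        while (violationMatcher.find()) {
            violations.add(violationMatcher.group(1));
        }
        System.out.println(missing);    // [com.example.Gone]
        System.out.println(violations); // [com.example.BadCaller]
    }
}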
@@ -0,0 +1 @@
implementation-class=org.elasticsearch.gradle.clusterformation.ClusterformationPlugin

@@ -23,6 +23,7 @@
unfair. -->
<module name="LineLength">
    <property name="max" value="140"/>
    <property name="ignorePattern" value="^ *\* *https?://[^ ]+$"/>
</module>

<module name="AvoidStarImport" />

@@ -9,6 +9,7 @@
<!-- These files are generated by ANTLR so it's silly to hold them to our rules. -->
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessLexer\.java" checks="." />
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessParser(|BaseVisitor|Visitor)\.java" checks="." />
<suppress files="plugin[/\\]sql[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]xpack[/\\]sql[/\\]parser[/\\]SqlBase(Base(Listener|Visitor)|Lexer|Listener|Parser|Visitor).java" checks="." />

<!-- JNA requires the no-argument constructor on JNAKernel32Library.SizeT to be public-->
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNAKernel32Library.java" checks="RedundantModifier" />

@@ -359,13 +360,7 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]TermsAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]TermsAggregatorFactory.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]support[/\\]IncludeExclude.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]cardinality[/\\]CardinalityAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]cardinality[/\\]HyperLogLogPlusPlus.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]geobounds[/\\]GeoBoundsAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]geocentroid[/\\]InternalGeoCentroid.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]percentiles[/\\]tdigest[/\\]TDigestPercentileRanksAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]percentiles[/\\]tdigest[/\\]TDigestPercentilesAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]scripted[/\\]ScriptedMetricAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]HyperLogLogPlusPlus.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]AggregationPath.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]AggregatedDfs.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]DfsSearchResult.java" checks="LineLength" />

@@ -467,7 +462,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]ExpectedShardSizeAllocationTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FailedNodeRoutingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FailedShardsRoutingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FilterRoutingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]IndexBalanceTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]NodeVersionAllocationDeciderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]PreferLocalPrimariesToRelocatingPrimariesTests.java" checks="LineLength" />

@@ -539,7 +533,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentMapperMergeTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentParserTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DynamicMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ExternalFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ExternalMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ExternalMetadataMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldNamesFieldMapperTests.java" checks="LineLength" />

@@ -641,8 +634,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsDocCountErrorIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsShardMinDocCountIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]NestedAggregatorTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]AbstractGeoTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]TopHitsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]ExtendedStatsBucketIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]moving[/\\]avg[/\\]MovAvgIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]serialdiff[/\\]SerialDiffIT.java" checks="LineLength" />

@@ -1 +1 @@
4.9
4.10
@@ -26,7 +26,7 @@ class VersionCollectionTests extends GradleUnitTestCase {
assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT"))
assertEquals(vc.stagedMinorSnapshot, Version.fromString("6.2.0-SNAPSHOT"))
assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.1.1-SNAPSHOT"))
assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT"))
assertNull(vc.maintenanceBugfixSnapshot)

vc.indexCompatible.containsAll(vc.versions)

@@ -65,7 +65,7 @@ class VersionCollectionTests extends GradleUnitTestCase {
assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT"))
assertEquals(vc.stagedMinorSnapshot, null)
assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.2.1-SNAPSHOT"))
assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT"))
assertNull(vc.maintenanceBugfixSnapshot)

vc.indexCompatible.containsAll(vc.versions)

@@ -153,12 +153,4 @@ public class BuildExamplePluginsIT extends GradleIntegrationTestCase {
    }
}

private String getLocalTestRepoPath() {
    String property = System.getProperty("test.local-test-repo-path");
    Objects.requireNonNull(property, "test.local-test-repo-path not passed to tests");
    File file = new File(property);
    assertTrue("Expected " + property + " to exist, but it did not!", file.exists());
    return file.getAbsolutePath();
}

}
@@ -0,0 +1,76 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import org.apache.commons.io.IOUtils;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class BuildPluginIT extends GradleIntegrationTestCase {

    public void testPluginCanBeApplied() {
        BuildResult result = getGradleRunner("elasticsearch.build")
            .withArguments("hello", "-s")
            .build();
        assertTaskSuccessful(result, ":hello");
        assertOutputContains("build plugin can be applied");
    }

    public void testCheckTask() {
        BuildResult result = getGradleRunner("elasticsearch.build")
            .withArguments("check", "assemble", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath())
            .build();
        assertTaskSuccessful(result, ":check");
    }

    public void testLicenseAndNotice() throws IOException {
        BuildResult result = getGradleRunner("elasticsearch.build")
            .withArguments("clean", "assemble", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath())
            .build();

        assertTaskSuccessful(result, ":assemble");

        assertBuildFileExists(result, "elasticsearch.build", "distributions/elasticsearch.build.jar");

        try (ZipFile zipFile = new ZipFile(new File(
            getBuildDir("elasticsearch.build"), "distributions/elasticsearch.build.jar"
        ))) {
            ZipEntry licenseEntry = zipFile.getEntry("META-INF/LICENSE.txt");
            ZipEntry noticeEntry = zipFile.getEntry("META-INF/NOTICE.txt");
            assertNotNull("Jar does not have META-INF/LICENSE.txt", licenseEntry);
            assertNotNull("Jar does not have META-INF/NOTICE.txt", noticeEntry);
            try (
                InputStream license = zipFile.getInputStream(licenseEntry);
                InputStream notice = zipFile.getInputStream(noticeEntry)
            ) {
                assertEquals("this is a test license file", IOUtils.toString(license, StandardCharsets.UTF_8.name()));
                assertEquals("this is a test notice file", IOUtils.toString(notice, StandardCharsets.UTF_8.name()));
            }
        }
    }

}
@@ -40,7 +40,7 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe
.withArguments("buildResources", "-s", "-i")
.withPluginClasspath()
.build();
assertTaskSuccessfull(result, ":buildResources");
assertTaskSuccessful(result, ":buildResources");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");

@@ -61,8 +61,8 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe
.withPluginClasspath()
.build();

assertTaskSuccessfull(result, ":buildResources");
assertTaskSuccessfull(result, ":sampleCopyAll");
assertTaskSuccessful(result, ":buildResources");
assertTaskSuccessful(result, ":sampleCopyAll");
assertBuildFileExists(result, PROJECT_NAME, "sampleCopyAll/checkstyle.xml");
// This is a side effect of compile time reference
assertBuildFileExists(result, PROJECT_NAME, "sampleCopyAll/checkstyle_suppressions.xml");

@@ -75,7 +75,7 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe
.withPluginClasspath()
.build();

assertTaskSuccessfull(result, ":sample");
assertTaskSuccessful(result, ":sample");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");
}
@@ -0,0 +1,144 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.clusterformation;

import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.gradle.testkit.runner.TaskOutcome;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

public class ClusterformationPluginIT extends GradleIntegrationTestCase {

    public void testListClusters() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("listElasticSearchClusters", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SUCCESS, result.task(":listElasticSearchClusters").getOutcome());
        assertOutputContains(
            result.getOutput(),
            " * myTestCluster:"
        );
    }

    public void testUseClusterByOne() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("user1", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SUCCESS, result.task(":user1").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "Starting cluster: myTestCluster",
            "Stopping myTestCluster, number of claims is 0"
        );
    }

    public void testUseClusterByOneWithDryRun() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("user1", "-s", "--dry-run")
            .withPluginClasspath()
            .build();

        assertNull(result.task(":user1"));
        assertOutputDoesNotContain(
            result.getOutput(),
            "Starting cluster: myTestCluster",
            "Stopping myTestCluster, number of claims is 0"
        );
    }

    public void testUseClusterByTwo() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("user1", "user2", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SUCCESS, result.task(":user1").getOutcome());
        assertEquals(TaskOutcome.SUCCESS, result.task(":user2").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "Starting cluster: myTestCluster",
            "Not stopping myTestCluster, since cluster still has 1 claim(s)",
            "Stopping myTestCluster, number of claims is 0"
        );
    }

    public void testUseClusterByUpToDateTask() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("upToDate1", "upToDate2", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.UP_TO_DATE, result.task(":upToDate1").getOutcome());
        assertEquals(TaskOutcome.UP_TO_DATE, result.task(":upToDate2").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "Not stopping myTestCluster, since cluster still has 1 claim(s)",
            "cluster was not running: myTestCluster"
        );
        assertOutputDoesNotContain(result.getOutput(), "Starting cluster: myTestCluster");
    }

    public void testUseClusterBySkippedTask() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("skipped1", "skipped2", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SKIPPED, result.task(":skipped1").getOutcome());
        assertEquals(TaskOutcome.SKIPPED, result.task(":skipped2").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "Not stopping myTestCluster, since cluster still has 1 claim(s)",
            "cluster was not running: myTestCluster"
        );
        assertOutputDoesNotContain(result.getOutput(), "Starting cluster: myTestCluster");
    }

    public void testUseClusterBySkippedAndWorkingTask() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("skipped1", "user1", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SKIPPED, result.task(":skipped1").getOutcome());
        assertEquals(TaskOutcome.SUCCESS, result.task(":user1").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "> Task :user1",
            "Starting cluster: myTestCluster",
            "Stopping myTestCluster, number of claims is 0"
        );
    }

}
@@ -0,0 +1,42 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.precommit;

import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;

public class JarHellTaskIT extends GradleIntegrationTestCase {

    public void testJarHellDetected() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("jarHell"))
            .withArguments("clean", "precommit", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath())
            .withPluginClasspath()
            .buildAndFail();

        assertTaskFailed(result, ":jarHell");
        assertOutputContains(
            result.getOutput(),
            "Exception in thread \"main\" java.lang.IllegalStateException: jar hell!",
            "class: org.apache.logging.log4j.Logger"
        );
    }

}
@ -2,74 +2,57 @@ package org.elasticsearch.gradle.precommit;

import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.gradle.testkit.runner.TaskOutcome;

import java.util.Arrays;
import java.util.HashSet;

public class NamingConventionsTaskIT extends GradleIntegrationTestCase {

    public void testPluginCanBeApplied() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("namingConventionsSelfTest"))
            .withArguments("hello", "-s", "-PcheckForTestsInMain=false")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SUCCESS, result.task(":hello").getOutcome());
        String output = result.getOutput();
        assertTrue(output, output.contains("build plugin can be applied"));
    }

    public void testNameCheckFailsAsItShould() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("namingConventionsSelfTest"))
        BuildResult result = getGradleRunner("namingConventionsSelfTest")
            .withArguments("namingConventions", "-s", "-PcheckForTestsInMain=false")
            .withPluginClasspath()
            .buildAndFail();

        assertNotNull("task did not run", result.task(":namingConventions"));
        assertEquals(TaskOutcome.FAILED, result.task(":namingConventions").getOutcome());
        String output = result.getOutput();
        for (String line : Arrays.asList(
        assertTaskFailed(result, ":namingConventions");
        assertOutputContains(
            result.getOutput(),
            // TODO: java9 Set.of
            new HashSet<>(
                Arrays.asList(
                    "Not all subclasses of UnitTestCase match the naming convention. Concrete classes must end with [Tests]:",
                    "* org.elasticsearch.test.WrongName",
                    "Found inner classes that are tests, which are excluded from the test runner:",
                    "* org.elasticsearch.test.NamingConventionsCheckInMainIT$InternalInvalidTests",
                    "Classes ending with [Tests] must subclass [UnitTestCase]:",
                    "* org.elasticsearch.test.NamingConventionsCheckInMainTests",
                    "* org.elasticsearch.test.NamingConventionsCheckInMainIT",
        "Not all subclasses of UnitTestCase match the naming convention. Concrete classes must end with [Tests]:",
        "* org.elasticsearch.test.WrongName")) {
            assertTrue(
                "expected: '" + line + "' but it was not found in the output:\n" + output,
                output.contains(line)
                    "* org.elasticsearch.test.NamingConventionsCheckInMainIT"
                )
            )
        );
        }
    }

    public void testNameCheckFailsAsItShouldWithMain() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("namingConventionsSelfTest"))
        BuildResult result = getGradleRunner("namingConventionsSelfTest")
            .withArguments("namingConventions", "-s", "-PcheckForTestsInMain=true")
            .withPluginClasspath()
            .buildAndFail();

        assertNotNull("task did not run", result.task(":namingConventions"));
        assertEquals(TaskOutcome.FAILED, result.task(":namingConventions").getOutcome());

        String output = result.getOutput();
        for (String line : Arrays.asList(
        assertTaskFailed(result, ":namingConventions");
        assertOutputContains(
            result.getOutput(),
            // TODO: java9 Set.of
            new HashSet<>(
                Arrays.asList(
                    "Classes ending with [Tests] or [IT] or extending [UnitTestCase] must be in src/test/java:",
                    "* org.elasticsearch.test.NamingConventionsCheckBadClasses$DummyInterfaceTests",
                    "* org.elasticsearch.test.NamingConventionsCheckBadClasses$DummyInterfaceTests",
                    "* org.elasticsearch.test.NamingConventionsCheckBadClasses$DummyAbstractTests",
                    "* org.elasticsearch.test.NamingConventionsCheckBadClasses$InnerTests",
                    "* org.elasticsearch.test.NamingConventionsCheckBadClasses$NotImplementingTests",
                    "* org.elasticsearch.test.NamingConventionsCheckBadClasses$WrongNameTheSecond",
                    "* org.elasticsearch.test.NamingConventionsCheckBadClasses$WrongName")) {
            assertTrue(
                "expected: '" + line + "' but it was not found in the output:\n"+output,
                output.contains(line)
                    "* org.elasticsearch.test.NamingConventionsCheckBadClasses$WrongName"
                )
            )
        );
        }
    }

}

@ -9,6 +9,8 @@ import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@ -46,6 +48,12 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
        }
    }

    protected void assertOutputContains(String output, Set<String> lines) {
        for (String line : lines) {
            assertOutputContains(output, line);
        }
    }

    protected void assertOutputContains(String output, String line) {
        assertTrue(
            "Expected the following line in output:\n\n" + line + "\n\nOutput is:\n" + output,

@ -66,15 +74,24 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
        }
    }

    protected void assertTaskSuccessfull(BuildResult result, String taskName) {
    protected void assertTaskFailed(BuildResult result, String taskName) {
        assertTaskOutcome(result, taskName, TaskOutcome.FAILED);
    }

    protected void assertTaskSuccessful(BuildResult result, String taskName) {
        assertTaskOutcome(result, taskName, TaskOutcome.SUCCESS);
    }

    private void assertTaskOutcome(BuildResult result, String taskName, TaskOutcome taskOutcome) {
        BuildTask task = result.task(taskName);
        if (task == null) {
            fail("Expected task `" + taskName + "` to be successful, but it did not run");
            fail("Expected task `" + taskName + "` to be " + taskOutcome + ", but it did not run" +
                "\n\nOutput is:\n" + result.getOutput());
        }
        assertEquals(
            "Expected task to be successful but it was: " + task.getOutcome() +
                "\n\nOutput is:\n" + result.getOutput(),
            TaskOutcome.SUCCESS,
            "Expected task `" + taskName + "` to be " + taskOutcome + " but it was: " + task.getOutcome() +
                "\n\nOutput is:\n" + result.getOutput(),
            taskOutcome,
            task.getOutcome()
        );
    }

@ -109,4 +126,17 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
            Files.exists(absPath)
        );
    }

    protected String getLocalTestRepoPath() {
        String property = System.getProperty("test.local-test-repo-path");
        Objects.requireNonNull(property, "test.local-test-repo-path not passed to tests");
        File file = new File(property);
        assertTrue("Expected " + property + " to exist, but it did not!", file.exists());
        if (File.separator.equals("\\")) {
            // Use / on Windows too, the build script is not happy with \
            return file.getAbsolutePath().replace(File.separator, "/");
        } else {
            return file.getAbsolutePath();
        }
    }
}

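For orientation, here is a hedged sketch of how these helpers are meant to compose in a plugin integration test; the project name "someProject" and task ":someTask" are hypothetical, not part of this change:

    public void testSomeTaskSucceeds() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("someProject"))   // hypothetical test project
            .withArguments("someTask", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath())
            .withPluginClasspath()
            .build();

        // The outcome assertions above print the full build output on failure,
        // which makes CI logs much easier to read than a bare assertEquals.
        assertTaskSuccessful(result, ":someTask");
        assertOutputDoesNotContain(result.getOutput(), "unexpected warning");
    }
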
@ -0,0 +1,41 @@
plugins {
    id 'elasticsearch.clusterformation'
}

elasticSearchClusters {
    myTestCluster {
        distribution = 'ZIP'
    }
}

task user1 {
    useCluster elasticSearchClusters.myTestCluster
    doLast {
        println "user1 executing"
    }
}

task user2 {
    useCluster elasticSearchClusters.myTestCluster
    doLast {
        println "user2 executing"
    }
}

task upToDate1 {
    useCluster elasticSearchClusters.myTestCluster
}

task upToDate2 {
    useCluster elasticSearchClusters.myTestCluster
}

task skipped1 {
    enabled = false
    useCluster elasticSearchClusters.myTestCluster
}

task skipped2 {
    enabled = false
    useCluster elasticSearchClusters.myTestCluster
}

@ -0,0 +1 @@
this is a test license file

@ -0,0 +1 @@
this is a test notice file

@ -0,0 +1,36 @@
plugins {
    id 'java'
    id 'elasticsearch.build'
}

ext.licenseFile = file("LICENSE")
ext.noticeFile = file("NOTICE")

dependencies {
    compile "junit:junit:${versions.junit}"
    // missing classes in thirdparty audit
    compile 'org.hamcrest:hamcrest-core:1.3'
}

repositories {
    mavenCentral()
    repositories {
        maven {
            url System.getProperty("local.repo.path")
        }
    }
}

// todo remove offending rules
forbiddenApisMain.enabled = false
forbiddenApisTest.enabled = false
// requires dependency on testing fw
jarHell.enabled = false
// we don't have tests for now
test.enabled = false

task hello {
    doFirst {
        println "build plugin can be applied"
    }
}

@ -0,0 +1 @@
42a25dc3219429f0e5d060061f71acb49bf010a0

@ -0,0 +1 @@
2973d150c0dc1fefe998f834810d68f278ea58ec

@ -16,9 +16,11 @@
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch;

/**
 * Request and Response objects for the default distribution's Machine
 * Learning APIs.
 * This is just a test class
 */
package org.elasticsearch.protocol.xpack.ml;
public class SampleClass {

}

@ -0,0 +1,29 @@
plugins {
    id 'java'
    id 'elasticsearch.build'
}

dependencyLicenses.enabled = false
dependenciesInfo.enabled = false
forbiddenApisMain.enabled = false
forbiddenApisTest.enabled = false
thirdPartyAudit.enabled = false
namingConventions.enabled = false
ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")

repositories {
    mavenCentral()
    repositories {
        maven {
            url System.getProperty("local.repo.path")
        }
    }
}

dependencies {
    // Needed for the JarHell task
    testCompile ("org.elasticsearch.test:framework:${versions.elasticsearch}")
    // causes jar hell with local sources
    compile "org.apache.logging.log4j:log4j-api:${versions.log4j}"
}

@ -0,0 +1,7 @@
package org.apache.logging.log4j;

// Jar Hell!
public class Logger {

}

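For context, jar hell is exactly this situation: the same fully qualified class name (here org.apache.logging.log4j.Logger) provided by two different classpath entries. A minimal standalone sketch of the kind of duplicate-class scan such a check performs — not the actual JarHell implementation from the test framework:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

// Minimal sketch: fail if any .class entry appears in more than one jar.
public class DuplicateClassCheck {
    public static void main(String[] args) throws IOException {
        Map<String, String> seen = new HashMap<>();
        for (String jarPath : args) {
            try (JarFile jar = new JarFile(jarPath)) {
                jar.stream()
                    .map(JarEntry::getName)
                    .filter(name -> name.endsWith(".class"))
                    .forEach(name -> {
                        String previous = seen.put(name, jarPath);
                        if (previous != null && !previous.equals(jarPath)) {
                            throw new IllegalStateException("jar hell! class: "
                                + name + " in " + previous + " and " + jarPath);
                        }
                    });
            }
        }
    }
}
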
@ -13,14 +13,8 @@ thirdPartyAudit.enabled = false
ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")

task hello {
    doFirst {
        println "build plugin can be applied"
    }
}

dependencies {
    compile "junit:junit:${versions.junit}"
    compile "junit:junit:4.12"
}

namingConventions {

@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
lucene = 7.5.0-snapshot-13b9e28f9d
lucene = 8.0.0-snapshot-66c671ea80

# optional dependencies
spatial4j = 0.7

@ -23,8 +23,7 @@ apply plugin: 'application'
group = 'org.elasticsearch.client'

// Not published so no need to assemble
tasks.remove(assemble)
build.dependsOn.remove('assemble')
assemble.enabled = true

archivesBaseName = 'client-benchmarks'
mainClassName = 'org.elasticsearch.client.benchmark.BenchmarkMain'

@ -28,8 +28,7 @@ esplugin {
}

// Not published so no need to assemble
tasks.remove(assemble)
build.dependsOn.remove('assemble')
assemble.enabled = false

dependencyLicenses.enabled = false
dependenciesInfo.enabled = false

@ -16,8 +16,6 @@
 * specific language governing permissions and limitations
 * under the License.
 */

import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.gradle.api.internal.provider.Providers

@ -47,13 +45,13 @@ dependencies {
 * Everything in the "shadow" configuration is *not* copied into the
 * shadowJar.
 */
shadow "org.elasticsearch:elasticsearch:${version}"
shadow "org.elasticsearch.client:elasticsearch-rest-client:${version}"
shadow "org.elasticsearch.plugin:parent-join-client:${version}"
shadow "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}"
shadow "org.elasticsearch.plugin:rank-eval-client:${version}"
shadow "org.elasticsearch.plugin:lang-mustache-client:${version}"
compile project(':x-pack:protocol')
compile "org.elasticsearch:elasticsearch:${version}"
compile "org.elasticsearch.client:elasticsearch-rest-client:${version}"
compile "org.elasticsearch.plugin:parent-join-client:${version}"
compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}"
compile "org.elasticsearch.plugin:rank-eval-client:${version}"
compile "org.elasticsearch.plugin:lang-mustache-client:${version}"
bundle project(':x-pack:protocol')

testCompile "org.elasticsearch.client:test:${version}"
testCompile "org.elasticsearch.test:framework:${version}"

@ -75,10 +73,31 @@ dependencyLicenses {
forbiddenApisMain {
    // core does not depend on the httpclient for compile so we add the signatures here. We don't add them for test as they are already
    // specified
    signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')]
    signaturesURLs += [file('src/main/resources/forbidden/rest-high-level-signatures.txt').toURI().toURL()]
    addSignatureFiles 'http-signatures'
    signaturesFiles += files('src/main/resources/forbidden/rest-high-level-signatures.txt')
}

integTestRunner {
    systemProperty 'tests.rest.cluster.username', System.getProperty('tests.rest.cluster.username', 'test_user')
    systemProperty 'tests.rest.cluster.password', System.getProperty('tests.rest.cluster.password', 'test-password')
}

integTestCluster {
    setting 'xpack.license.self_generated.type', 'trial'
    setting 'xpack.security.enabled', 'true'
    setupCommand 'setupDummyUser',
        'bin/elasticsearch-users',
        'useradd', System.getProperty('tests.rest.cluster.username', 'test_user'),
        '-p', System.getProperty('tests.rest.cluster.password', 'test-password'),
        '-r', 'superuser'
    waitCondition = { node, ant ->
        File tmpFile = new File(node.cwd, 'wait.success')
        ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",
            dest: tmpFile.toString(),
            username: System.getProperty('tests.rest.cluster.username', 'test_user'),
            password: System.getProperty('tests.rest.cluster.password', 'test-password'),
            ignoreerrors: true,
            retries: 10)
        return tmpFile.exists()
    }
}

@ -56,7 +56,7 @@ public final class ClusterClient {
     */
    public ClusterUpdateSettingsResponse putSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest, RequestOptions options)
            throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(clusterUpdateSettingsRequest, RequestConverters::clusterPutSettings,
        return restHighLevelClient.performRequestAndParseEntity(clusterUpdateSettingsRequest, ClusterRequestConverters::clusterPutSettings,
            options, ClusterUpdateSettingsResponse::fromXContent, emptySet());
    }

@ -70,7 +70,7 @@ public final class ClusterClient {
     */
    public void putSettingsAsync(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest, RequestOptions options,
                                 ActionListener<ClusterUpdateSettingsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(clusterUpdateSettingsRequest, RequestConverters::clusterPutSettings,
        restHighLevelClient.performRequestAsyncAndParseEntity(clusterUpdateSettingsRequest, ClusterRequestConverters::clusterPutSettings,
            options, ClusterUpdateSettingsResponse::fromXContent, listener, emptySet());
    }

@ -85,7 +85,7 @@ public final class ClusterClient {
     */
    public ClusterGetSettingsResponse getSettings(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options)
            throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(clusterGetSettingsRequest, RequestConverters::clusterGetSettings,
        return restHighLevelClient.performRequestAndParseEntity(clusterGetSettingsRequest, ClusterRequestConverters::clusterGetSettings,
            options, ClusterGetSettingsResponse::fromXContent, emptySet());
    }

@ -99,7 +99,7 @@ public final class ClusterClient {
     */
    public void getSettingsAsync(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options,
                                 ActionListener<ClusterGetSettingsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(clusterGetSettingsRequest, RequestConverters::clusterGetSettings,
        restHighLevelClient.performRequestAsyncAndParseEntity(clusterGetSettingsRequest, ClusterRequestConverters::clusterGetSettings,
            options, ClusterGetSettingsResponse::fromXContent, listener, emptySet());
    }

@ -115,7 +115,7 @@ public final class ClusterClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public ClusterHealthResponse health(ClusterHealthRequest healthRequest, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(healthRequest, RequestConverters::clusterHealth, options,
        return restHighLevelClient.performRequestAndParseEntity(healthRequest, ClusterRequestConverters::clusterHealth, options,
            ClusterHealthResponse::fromXContent, singleton(RestStatus.REQUEST_TIMEOUT.getStatus()));
    }

@ -129,7 +129,7 @@ public final class ClusterClient {
     * @param listener the listener to be notified upon request completion
     */
    public void healthAsync(ClusterHealthRequest healthRequest, RequestOptions options, ActionListener<ClusterHealthResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(healthRequest, RequestConverters::clusterHealth, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(healthRequest, ClusterRequestConverters::clusterHealth, options,
            ClusterHealthResponse::fromXContent, listener, singleton(RestStatus.REQUEST_TIMEOUT.getStatus()));
    }
}

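From the caller's side nothing changes; a hedged usage sketch of the health API above (the host and port are placeholders, and error handling is omitted):

import org.apache.http.HttpHost;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;

public class ClusterHealthExample {
    public static void main(String[] args) throws Exception {
        // Placeholder endpoint; point this at a real cluster.
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            // Internally this now routes through ClusterRequestConverters::clusterHealth.
            ClusterHealthResponse health =
                client.cluster().health(new ClusterHealthRequest(), RequestOptions.DEFAULT);
            System.out.println("cluster status: " + health.getStatus());
        }
    }
}
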
@ -0,0 +1,77 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.common.Strings;

import java.io.IOException;

final class ClusterRequestConverters {

    static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest) throws IOException {
        Request request = new Request(HttpPut.METHOD_NAME, "/_cluster/settings");

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withTimeout(clusterUpdateSettingsRequest.timeout());
        parameters.withMasterTimeout(clusterUpdateSettingsRequest.masterNodeTimeout());

        request.setEntity(RequestConverters.createEntity(clusterUpdateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request clusterGetSettings(ClusterGetSettingsRequest clusterGetSettingsRequest) throws IOException {
        Request request = new Request(HttpGet.METHOD_NAME, "/_cluster/settings");

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withLocal(clusterGetSettingsRequest.local());
        parameters.withIncludeDefaults(clusterGetSettingsRequest.includeDefaults());
        parameters.withMasterTimeout(clusterGetSettingsRequest.masterNodeTimeout());

        return request;
    }

    static Request clusterHealth(ClusterHealthRequest healthRequest) {
        String[] indices = healthRequest.indices() == null ? Strings.EMPTY_ARRAY : healthRequest.indices();
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_cluster/health")
            .addCommaSeparatedPathParts(indices)
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        new RequestConverters.Params(request)
            .withWaitForStatus(healthRequest.waitForStatus())
            .withWaitForNoRelocatingShards(healthRequest.waitForNoRelocatingShards())
            .withWaitForNoInitializingShards(healthRequest.waitForNoInitializingShards())
            .withWaitForActiveShards(healthRequest.waitForActiveShards(), ActiveShardCount.NONE)
            .withWaitForNodes(healthRequest.waitForNodes())
            .withWaitForEvents(healthRequest.waitForEvents())
            .withTimeout(healthRequest.timeout())
            .withMasterTimeout(healthRequest.masterNodeTimeout())
            .withLocal(healthRequest.local())
            .withLevel(healthRequest.level());
        return request;
    }
}

@ -0,0 +1,63 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;

import java.io.IOException;

import static java.util.Collections.emptySet;


public class GraphClient {
    private final RestHighLevelClient restHighLevelClient;

    GraphClient(RestHighLevelClient restHighLevelClient) {
        this.restHighLevelClient = restHighLevelClient;
    }

    /**
     * Executes an exploration request using the Graph API.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html">Graph API
     * on elastic.co</a>.
     */
    public final GraphExploreResponse explore(GraphExploreRequest graphExploreRequest,
                                              RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(graphExploreRequest, GraphRequestConverters::explore,
            options, GraphExploreResponse::fromXContext, emptySet());
    }

    /**
     * Asynchronously executes an exploration request using the Graph API.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html">Graph API
     * on elastic.co</a>.
     */
    public final void exploreAsync(GraphExploreRequest graphExploreRequest,
                                   RequestOptions options,
                                   ActionListener<GraphExploreResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(graphExploreRequest, GraphRequestConverters::explore,
            options, GraphExploreResponse::fromXContext, listener, emptySet());
    }

}

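A hedged sketch of calling this new entry point, assuming the high-level client exposes it as client.graph(); the index and field names are made up, and the hop setup is abbreviated from the typical usage pattern:

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
import org.elasticsearch.protocol.xpack.graph.Hop;

// Assumes `client` is an initialized RestHighLevelClient.
GraphExploreRequest request = new GraphExploreRequest();
request.indices("my-index");                      // hypothetical index name
Hop hop = request.createNextHop(null);            // null = no guiding query
hop.addVertexRequest("user.id");                  // hypothetical field to walk
GraphExploreResponse response = client.graph().explore(request, RequestOptions.DEFAULT);
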
@ -0,0 +1,35 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;

import java.io.IOException;

public class GraphRequestConverters {

    static Request explore(GraphExploreRequest exploreRequest) throws IOException {
        String endpoint = RequestConverters.endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore");
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        request.setEntity(RequestConverters.createEntity(exploreRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }
}

@ -55,7 +55,7 @@ public final class IngestClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::putPipeline, options,
        return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::putPipeline, options,
            AcknowledgedResponse::fromXContent, emptySet());
    }

@ -68,7 +68,7 @@ public final class IngestClient {
     * @param listener the listener to be notified upon request completion
     */
    public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::putPipeline, options,
        restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::putPipeline, options,
            AcknowledgedResponse::fromXContent, listener, emptySet());
    }

@ -82,7 +82,7 @@ public final class IngestClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::getPipeline, options,
        return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::getPipeline, options,
            GetPipelineResponse::fromXContent, emptySet());
    }

@ -95,7 +95,7 @@ public final class IngestClient {
     * @param listener the listener to be notified upon request completion
     */
    public void getPipelineAsync(GetPipelineRequest request, RequestOptions options, ActionListener<GetPipelineResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::getPipeline, options,
        restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::getPipeline, options,
            GetPipelineResponse::fromXContent, listener, emptySet());
    }

@ -110,7 +110,7 @@ public final class IngestClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::deletePipeline, options,
        return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::deletePipeline, options,
            AcknowledgedResponse::fromXContent, emptySet());
    }

@ -124,7 +124,7 @@ public final class IngestClient {
     * @param listener the listener to be notified upon request completion
     */
    public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options,
        restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::deletePipeline, options,
            AcknowledgedResponse::fromXContent, listener, emptySet());
    }

@ -140,7 +140,7 @@ public final class IngestClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public SimulatePipelineResponse simulate(SimulatePipelineRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options,
        return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::simulatePipeline, options,
            SimulatePipelineResponse::fromXContent, emptySet());
    }

@ -157,7 +157,7 @@ public final class IngestClient {
    public void simulateAsync(SimulatePipelineRequest request,
                              RequestOptions options,
                              ActionListener<SimulatePipelineResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options,
        restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::simulatePipeline, options,
            SimulatePipelineResponse::fromXContent, listener, emptySet());
    }
}

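A hedged usage sketch of the ingest client above; the pipeline id and body are hypothetical, and `client` is assumed to be an initialized RestHighLevelClient:

import java.nio.charset.StandardCharsets;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;

// A tiny pipeline with a single `set` processor.
String source = "{\"description\":\"test pipeline\","
    + "\"processors\":[{\"set\":{\"field\":\"greeting\",\"value\":\"hello\"}}]}";
PutPipelineRequest put = new PutPipelineRequest(
    "my-pipeline",                                            // hypothetical pipeline id
    new BytesArray(source.getBytes(StandardCharsets.UTF_8)),
    XContentType.JSON);
AcknowledgedResponse ack = client.ingest().putPipeline(put, RequestOptions.DEFAULT);
System.out.println("acknowledged: " + ack.isAcknowledged());
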
@ -0,0 +1,89 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.GetPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.SimulatePipelineRequest;

import java.io.IOException;

public class IngestRequestConverters {

    static Request getPipeline(GetPipelineRequest getPipelineRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_ingest/pipeline")
            .addCommaSeparatedPathParts(getPipelineRequest.getIds())
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout());
        return request;
    }

    static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_ingest/pipeline")
            .addPathPart(putPipelineRequest.getId())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withTimeout(putPipelineRequest.timeout());
        parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout());

        request.setEntity(RequestConverters.createEntity(putPipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_ingest/pipeline")
            .addPathPart(deletePipelineRequest.getId())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withTimeout(deletePipelineRequest.timeout());
        parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout());

        return request;
    }

    static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException {
        RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ingest/pipeline");
        if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) {
            builder.addPathPart(simulatePipelineRequest.getId());
        }
        builder.addPathPartAsIs("_simulate");
        String endpoint = builder.build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params(request);
        params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose()));
        request.setEntity(RequestConverters.createEntity(simulatePipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }
}

@ -65,7 +65,7 @@ public final class LicenseClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public PutLicenseResponse putLicense(PutLicenseRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::putLicense, options,
        return restHighLevelClient.performRequestAndParseEntity(request, LicenseRequestConverters::putLicense, options,
            PutLicenseResponse::fromXContent, emptySet());
    }

@ -75,7 +75,7 @@ public final class LicenseClient {
     * @param listener the listener to be notified upon request completion
     */
    public void putLicenseAsync(PutLicenseRequest request, RequestOptions options, ActionListener<PutLicenseResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::putLicense, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(request, LicenseRequestConverters::putLicense, options,
            PutLicenseResponse::fromXContent, listener, emptySet());
    }

@ -86,7 +86,7 @@ public final class LicenseClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public GetLicenseResponse getLicense(GetLicenseRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequest(request, RequestConverters::getLicense, options,
        return restHighLevelClient.performRequest(request, LicenseRequestConverters::getLicense, options,
            response -> new GetLicenseResponse(convertResponseToJson(response)), emptySet());
    }

@ -96,7 +96,7 @@ public final class LicenseClient {
     * @param listener the listener to be notified upon request completion
     */
    public void getLicenseAsync(GetLicenseRequest request, RequestOptions options, ActionListener<GetLicenseResponse> listener) {
        restHighLevelClient.performRequestAsync(request, RequestConverters::getLicense, options,
        restHighLevelClient.performRequestAsync(request, LicenseRequestConverters::getLicense, options,
            response -> new GetLicenseResponse(convertResponseToJson(response)), listener, emptySet());
    }

@ -107,7 +107,7 @@ public final class LicenseClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse deleteLicense(DeleteLicenseRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::deleteLicense, options,
        return restHighLevelClient.performRequestAndParseEntity(request, LicenseRequestConverters::deleteLicense, options,
            AcknowledgedResponse::fromXContent, emptySet());
    }

@ -117,7 +117,7 @@ public final class LicenseClient {
     * @param listener the listener to be notified upon request completion
     */
    public void deleteLicenseAsync(DeleteLicenseRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::deleteLicense, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(request, LicenseRequestConverters::deleteLicense, options,
            AcknowledgedResponse::fromXContent, listener, emptySet());
    }

@ -0,0 +1,64 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;

public class LicenseRequestConverters {
    static Request putLicense(PutLicenseRequest putLicenseRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("license")
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withTimeout(putLicenseRequest.timeout());
        parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout());
        if (putLicenseRequest.isAcknowledge()) {
            parameters.putParam("acknowledge", "true");
        }
        request.setJsonEntity(putLicenseRequest.getLicenseDefinition());
        return request;
    }

    static Request getLicense(GetLicenseRequest getLicenseRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("license")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withLocal(getLicenseRequest.local());
        return request;
    }

    static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) {
        Request request = new Request(HttpDelete.METHOD_NAME, "/_xpack/license");
        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withTimeout(deleteLicenseRequest.timeout());
        parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout());
        return request;
    }
}

@ -19,17 +19,35 @@

package org.elasticsearch.client;

import org.apache.http.HttpEntity;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;

import java.io.IOException;

import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createContentType;
import static org.elasticsearch.client.RequestConverters.createEntity;

final class MLRequestConverters {

@ -48,6 +66,40 @@ final class MLRequestConverters {
        return request;
    }

    static Request getJob(GetJobRequest getJobRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(Strings.collectionToCommaDelimitedString(getJobRequest.getJobIds()))
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params params = new RequestConverters.Params(request);
        if (getJobRequest.isAllowNoJobs() != null) {
            params.putParam("allow_no_jobs", Boolean.toString(getJobRequest.isAllowNoJobs()));
        }

        return request;
    }

    static Request getJobStats(GetJobStatsRequest getJobStatsRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds()))
            .addPathPartAsIs("_stats")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params params = new RequestConverters.Params(request);
        if (getJobStatsRequest.isAllowNoJobs() != null) {
            params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.isAllowNoJobs()));
        }
        return request;
    }

    static Request openJob(OpenJobRequest openJobRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")

@ -57,7 +109,20 @@ final class MLRequestConverters {
            .addPathPartAsIs("_open")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        request.setJsonEntity(openJobRequest.toString());
        request.setEntity(createEntity(openJobRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request closeJob(CloseJobRequest closeJobRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(Strings.collectionToCommaDelimitedString(closeJobRequest.getJobIds()))
            .addPathPartAsIs("_close")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        request.setEntity(createEntity(closeJobRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

@ -75,4 +140,128 @@ final class MLRequestConverters {

        return request;
    }

    static Request flushJob(FlushJobRequest flushJobRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(flushJobRequest.getJobId())
            .addPathPartAsIs("_flush")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request forecastJob(ForecastJobRequest forecastJobRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(forecastJobRequest.getJobId())
            .addPathPartAsIs("_forecast")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        request.setEntity(createEntity(forecastJobRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request updateJob(UpdateJobRequest updateJobRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(updateJobRequest.getJobUpdate().getJobId())
            .addPathPartAsIs("_update")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        request.setEntity(createEntity(updateJobRequest.getJobUpdate(), REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(getBucketsRequest.getJobId())
            .addPathPartAsIs("results")
            .addPathPartAsIs("buckets")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        request.setEntity(createEntity(getBucketsRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getOverallBuckets(GetOverallBucketsRequest getOverallBucketsRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(Strings.collectionToCommaDelimitedString(getOverallBucketsRequest.getJobIds()))
            .addPathPartAsIs("results")
            .addPathPartAsIs("overall_buckets")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        request.setEntity(createEntity(getOverallBucketsRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(getRecordsRequest.getJobId())
            .addPathPartAsIs("results")
            .addPathPartAsIs("records")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        request.setEntity(createEntity(getRecordsRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request postData(PostDataRequest postDataRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(postDataRequest.getJobId())
            .addPathPartAsIs("_data")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);

        RequestConverters.Params params = new RequestConverters.Params(request);
        if (postDataRequest.getResetStart() != null) {
            params.putParam(PostDataRequest.RESET_START.getPreferredName(), postDataRequest.getResetStart());
        }
        if (postDataRequest.getResetEnd() != null) {
            params.putParam(PostDataRequest.RESET_END.getPreferredName(), postDataRequest.getResetEnd());
        }
        BytesReference content = postDataRequest.getContent();
        if (content != null) {
            BytesRef source = postDataRequest.getContent().toBytesRef();
            HttpEntity byteEntity = new ByteArrayEntity(source.bytes,
                source.offset,
                source.length,
                createContentType(postDataRequest.getXContentType()));
            request.setEntity(byteEntity);
        }
        return request;
    }

    static Request getInfluencers(GetInfluencersRequest getInfluencersRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("ml")
            .addPathPartAsIs("anomaly_detectors")
            .addPathPart(getInfluencersRequest.getJobId())
            .addPathPartAsIs("results")
            .addPathPartAsIs("influencers")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        request.setEntity(createEntity(getInfluencersRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }
}

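All of these converters share one pattern: build the URL path with EndpointBuilder, attach query parameters, then serialize the request body. A framework-free sketch of the path construction for, say, getBuckets — the job id is hypothetical, and the real EndpointBuilder additionally URL-encodes each part:

public class EndpointSketch {
    public static void main(String[] args) {
        String jobId = "my-job";   // hypothetical job id
        StringBuilder endpoint = new StringBuilder();
        for (String part : new String[] {"_xpack", "ml", "anomaly_detectors", jobId, "results", "buckets"}) {
            endpoint.append('/').append(part);
        }
        // Prints: /_xpack/ml/anomaly_detectors/my-job/results/buckets
        System.out.println(endpoint);
    }
}
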
@ -19,12 +19,34 @@
package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobResponse;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetInfluencersResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetOverallBucketsResponse;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.job.stats.JobStats;

import java.io.IOException;
import java.util.Collections;

@ -50,9 +72,9 @@ public final class MachineLearningClient {
|
|||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-job.html">ML PUT job documentation</a>
|
||||
*
|
||||
* @param request the PutJobRequest containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings
|
||||
* @param request The PutJobRequest containing the {@link org.elasticsearch.client.ml.job.config.Job} settings
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return PutJobResponse with enclosed {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} object
|
||||
* @return PutJobResponse with enclosed {@link org.elasticsearch.client.ml.job.config.Job} object
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public PutJobResponse putJob(PutJobRequest request, RequestOptions options) throws IOException {
|
||||
|
@ -69,7 +91,7 @@ public final class MachineLearningClient {
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-job.html">ML PUT job documentation</a>
     *
     * @param request the request containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings
     * @param request The request containing the {@link org.elasticsearch.client.ml.job.config.Job} settings
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
@ -82,13 +104,95 @@ public final class MachineLearningClient {
            Collections.emptySet());
    }

    /**
     * Gets one or more Machine Learning job configuration info.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html"></a>
     * </p>
     * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return {@link GetJobResponse} response object containing
     * the {@link org.elasticsearch.client.ml.job.config.Job} objects and the number of jobs found
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public GetJobResponse getJob(GetJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::getJob,
                options,
                GetJobResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Gets one or more Machine Learning job configuration info, asynchronously.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html"></a>
     * </p>
     * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified with {@link GetJobResponse} upon request completion
     */
    public void getJobAsync(GetJobRequest request, RequestOptions options, ActionListener<GetJobResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::getJob,
                options,
                GetJobResponse::fromXContent,
                listener,
                Collections.emptySet());
    }

    /**
     * Gets usage statistics for one or more Machine Learning jobs
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get Job stats docs</a>
     * </p>
     * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return {@link GetJobStatsResponse} response object containing
     * the {@link JobStats} objects and the number of jobs found
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::getJobStats,
                options,
                GetJobStatsResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Gets usage statistics for one or more Machine Learning jobs, asynchronously.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get Job stats docs</a>
     * </p>
     * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion
     */
    public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener<GetJobStatsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::getJobStats,
                options,
                GetJobStatsResponse::fromXContent,
                listener,
                Collections.emptySet());
    }

    /**
     * Deletes the given Machine Learning Job
     * <p>
     * For additional info
     * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html">ML Delete Job documentation</a>
     * </p>
     * @param request the request to delete the job
     * @param request The request to delete the job
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return action acknowledgement
     * @throws IOException when there is a serialization issue sending the request or receiving the response
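For orientation, a plausible call against the new getJob method (a sketch; the job ids are hypothetical and `client` is assumed as above):

    // Usage sketch only: fetch two job configurations by id.
    GetJobRequest getRequest = new GetJobRequest("job-1", "job-2");
    GetJobResponse getResponse = client.machineLearning().getJob(getRequest, RequestOptions.DEFAULT);
    long jobsFound = getResponse.count();   // number of matching jobs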
@ -107,7 +211,7 @@ public final class MachineLearningClient {
     * For additional info
     * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html">ML Delete Job documentation</a>
     * </p>
     * @param request the request to delete the job
     * @param request The request to delete the job
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
@ -131,7 +235,7 @@ public final class MachineLearningClient {
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html"></a>
     * </p>
     * @param request request containing job_id and additional optional options
     * @param request Request containing job_id and additional optional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return response containing if the job was successfully opened or not.
     * @throws IOException when there is a serialization issue sending the request or receiving the response
@ -154,7 +258,7 @@ public final class MachineLearningClient {
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html"></a>
     * </p>
     * @param request request containing job_id and additional optional options
     * @param request Request containing job_id and additional optional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
@ -166,4 +270,367 @@ public final class MachineLearningClient {
            listener,
            Collections.emptySet());
    }

    /**
     * Closes one or more Machine Learning Jobs. A job can be opened and closed multiple times throughout its lifecycle.
     *
     * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.
     *
     * @param request Request containing job_ids and additional options. See {@link CloseJobRequest}
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return response containing if the job was successfully closed or not.
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public CloseJobResponse closeJob(CloseJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::closeJob,
                options,
                CloseJobResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Closes one or more Machine Learning Jobs asynchronously, notifies listener on completion
     *
     * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.
     *
     * @param request Request containing job_ids and additional options. See {@link CloseJobRequest}
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void closeJobAsync(CloseJobRequest request, RequestOptions options, ActionListener<CloseJobResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::closeJob,
                options,
                CloseJobResponse::fromXContent,
                listener,
                Collections.emptySet());
    }
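As the javadoc notes, closing is reversible; a minimal sketch (hypothetical job id, same assumed `client`):

    // Usage sketch only: close the job; it can be opened again later.
    CloseJobRequest closeRequest = new CloseJobRequest("job-1");
    CloseJobResponse closeResponse = client.machineLearning().closeJob(closeRequest, RequestOptions.DEFAULT);
    boolean closed = closeResponse.isClosed();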
    /**
     * Flushes internally buffered data for the given Machine Learning Job, ensuring all data sent to the job has been processed.
     * This may cause new results to be calculated depending on the contents of the buffer.
     *
     * Both flush and close operations are similar,
     * however the flush is more efficient if you are expecting to send more data for analysis.
     *
     * When flushing, the job remains open and is available to continue analyzing data.
     * A close operation additionally prunes and persists the model state to disk and the
     * job must be opened again before analyzing further data.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
     *
     * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::flushJob,
                options,
                FlushJobResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Flushes internally buffered data for the given Machine Learning Job asynchronously, ensuring all data sent to the job has been
     * processed. This may cause new results to be calculated depending on the contents of the buffer.
     *
     * Both flush and close operations are similar,
     * however the flush is more efficient if you are expecting to send more data for analysis.
     *
     * When flushing, the job remains open and is available to continue analyzing data.
     * A close operation additionally prunes and persists the model state to disk and the
     * job must be opened again before analyzing further data.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
     *
     * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener<FlushJobResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::flushJob,
                options,
                FlushJobResponse::fromXContent,
                listener,
                Collections.emptySet());
    }
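The flush-versus-close distinction in the javadoc above is easiest to see in code; a sketch assuming the same hypothetical client and job id:

    // Usage sketch only: flush buffered data but leave the job open for more input.
    FlushJobRequest flushRequest = new FlushJobRequest("job-1");
    flushRequest.setCalcInterim(true);   // also compute interim results for the latest data
    FlushJobResponse flushResponse = client.machineLearning().flushJob(flushRequest, RequestOptions.DEFAULT);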
    /**
     * Creates a forecast of an existing, opened Machine Learning Job
     *
     * This predicts the future behavior of a time series by using its historical behavior.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-forecast.html">Forecast ML Job Documentation</a>
     * </p>
     * @param request ForecastJobRequest with forecasting options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return response containing forecast acknowledgement and new forecast's ID
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public ForecastJobResponse forecastJob(ForecastJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::forecastJob,
                options,
                ForecastJobResponse::fromXContent,
                Collections.emptySet());
    }
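A sketch of the forecast call (hypothetical job id; TimeValue here would be org.elasticsearch.common.unit.TimeValue):

    // Usage sketch only: forecast 48 hours ahead on an open job.
    ForecastJobRequest forecastRequest = new ForecastJobRequest("job-1");
    forecastRequest.setDuration(TimeValue.timeValueHours(48));
    ForecastJobResponse forecastResponse = client.machineLearning().forecastJob(forecastRequest, RequestOptions.DEFAULT);
    String forecastId = forecastResponse.getForecastId();   // used to look the forecast up later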
    /**
     * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job}
     *
     * @param request the {@link UpdateJobRequest} object enclosing the desired updates
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return a PutJobResponse object containing the updated job object
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public PutJobResponse updateJob(UpdateJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::updateJob,
                options,
                PutJobResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Creates a forecast of an existing, opened Machine Learning Job asynchronously
     *
     * This predicts the future behavior of a time series by using its historical behavior.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-forecast.html">Forecast ML Job Documentation</a>
     * </p>
     * @param request ForecastJobRequest with forecasting options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void forecastJobAsync(ForecastJobRequest request, RequestOptions options, ActionListener<ForecastJobResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::forecastJob,
                options,
                ForecastJobResponse::fromXContent,
                listener,
                Collections.emptySet());
    }

    /**
     * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} asynchronously
     *
     * @param request the {@link UpdateJobRequest} object enclosing the desired updates
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void updateJobAsync(UpdateJobRequest request, RequestOptions options, ActionListener<PutJobResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::updateJob,
                options,
                PutJobResponse::fromXContent,
                listener,
                Collections.emptySet());
    }
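For updateJob, something like the following should work; the JobUpdate builder surface is an assumption about the client API of this period, not shown in the diff:

    // Usage sketch only (JobUpdate.Builder assumed): change the job's description.
    UpdateJobRequest updateRequest = new UpdateJobRequest(
            new JobUpdate.Builder("job-1").setDescription("revised description").build());
    PutJobResponse updateResponse = client.machineLearning().updateJob(updateRequest, RequestOptions.DEFAULT);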
    /**
     * Gets the buckets for a Machine Learning Job.
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-bucket.html">ML GET buckets documentation</a>
     *
     * @param request The request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     */
    public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::getBuckets,
                options,
                GetBucketsResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved.
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-bucket.html">ML GET buckets documentation</a>
     *
     * @param request The request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener<GetBucketsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::getBuckets,
                options,
                GetBucketsResponse::fromXContent,
                listener,
                Collections.emptySet());
    }
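A sketch of a buckets query (PageParams is assumed to be org.elasticsearch.client.ml.job.util.PageParams):

    // Usage sketch only: page through the first 100 result buckets.
    GetBucketsRequest bucketsRequest = new GetBucketsRequest("job-1");
    bucketsRequest.setPageParams(new PageParams(0, 100));   // from = 0, size = 100
    GetBucketsResponse bucketsResponse = client.machineLearning().getBuckets(bucketsRequest, RequestOptions.DEFAULT);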
    /**
     * Gets overall buckets for a set of Machine Learning Jobs.
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-overall-buckets.html">
     * ML GET overall buckets documentation</a>
     *
     * @param request The request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     */
    public GetOverallBucketsResponse getOverallBuckets(GetOverallBucketsRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::getOverallBuckets,
                options,
                GetOverallBucketsResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Gets overall buckets for a set of Machine Learning Jobs, notifies listener once the requested buckets are retrieved.
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-overall-buckets.html">
     * ML GET overall buckets documentation</a>
     *
     * @param request The request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void getOverallBucketsAsync(GetOverallBucketsRequest request, RequestOptions options,
                                       ActionListener<GetOverallBucketsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::getOverallBuckets,
                options,
                GetOverallBucketsResponse::fromXContent,
                listener,
                Collections.emptySet());
    }
    /**
     * Gets the records for a Machine Learning Job.
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-record.html">ML GET records documentation</a>
     *
     * @param request the request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     */
    public GetRecordsResponse getRecords(GetRecordsRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::getRecords,
                options,
                GetRecordsResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Gets the records for a Machine Learning Job, notifies listener once the requested records are retrieved.
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-record.html">ML GET records documentation</a>
     *
     * @param request the request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void getRecordsAsync(GetRecordsRequest request, RequestOptions options, ActionListener<GetRecordsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::getRecords,
                options,
                GetRecordsResponse::fromXContent,
                listener,
                Collections.emptySet());
    }
    /**
     * Sends data to an anomaly detection job for analysis.
     *
     * NOTE: The job must have a state of open to receive and process the data.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html">ML POST Data documentation</a>
     * </p>
     *
     * @param request PostDataRequest containing the data to post and some additional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return response containing operational progress about the job
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public PostDataResponse postData(PostDataRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::postData,
                options,
                PostDataResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Sends data to an anomaly detection job for analysis, asynchronously
     *
     * NOTE: The job must have a state of open to receive and process the data.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html">ML POST Data documentation</a>
     * </p>
     *
     * @param request PostDataRequest containing the data to post and some additional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void postDataAsync(PostDataRequest request, RequestOptions options, ActionListener<PostDataResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::postData,
                options,
                PostDataResponse::fromXContent,
                listener,
                Collections.emptySet());
    }
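Since postData requires an open job, a sketch of the happy path (field names and job id hypothetical; StandardCharsets is java.nio.charset.StandardCharsets):

    // Usage sketch only: send one JSON document to an open job.
    String doc = "{\"timestamp\":1534305600000,\"total\":42}";
    PostDataRequest postRequest = new PostDataRequest("job-1", XContentType.JSON,
            doc.getBytes(StandardCharsets.UTF_8));
    PostDataResponse postResponse = client.machineLearning().postData(postRequest, RequestOptions.DEFAULT);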
    /**
     * Gets the influencers for a Machine Learning Job.
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html">
     * ML GET influencers documentation</a>
     *
     * @param request the request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     */
    public GetInfluencersResponse getInfluencers(GetInfluencersRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::getInfluencers,
                options,
                GetInfluencersResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Gets the influencers for a Machine Learning Job, notifies listener once the requested influencers are retrieved.
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html">
     * ML GET influencers documentation</a>
     *
     * @param request the request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void getInfluencersAsync(GetInfluencersRequest request, RequestOptions options,
                                    ActionListener<GetInfluencersResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::getInfluencers,
                options,
                GetInfluencersResponse::fromXContent,
                listener,
                Collections.emptySet());
    }
}
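All of the *Async variants above share one shape: convert the request, send it, parse the entity, then notify the listener. A sketch of the listener half, using getInfluencersAsync as the example (`request` is an assumed GetInfluencersRequest):

    // Usage sketch only: the ActionListener contract used by every *Async method.
    client.machineLearning().getInfluencersAsync(request, RequestOptions.DEFAULT,
            new ActionListener<GetInfluencersResponse>() {
                @Override
                public void onResponse(GetInfluencersResponse response) {
                    // handle the parsed response
                }

                @Override
                public void onFailure(Exception e) {
                    // handle connection, serialization, or server-side failures
                }
            });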
@ -49,7 +49,7 @@ public final class MigrationClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public IndexUpgradeInfoResponse getAssistance(IndexUpgradeInfoRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::getMigrationAssistance, options,
        return restHighLevelClient.performRequestAndParseEntity(request, MigrationRequestConverters::getMigrationAssistance, options,
            IndexUpgradeInfoResponse::fromXContent, Collections.emptySet());
    }
}
@ -0,0 +1,37 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;

public class MigrationRequestConverters {

    static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
        RequestConverters.EndpointBuilder endpointBuilder = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack/migration/assistance")
            .addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices());
        String endpoint = endpointBuilder.build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withIndicesOptions(indexUpgradeInfoRequest.indicesOptions());
        return request;
    }
}
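The endpoint-building logic in this new converter is easiest to check by example; a sketch of what it produces for two hypothetical indices (same-package code, such as a test, can reach the package-private method):

    // Sketch only: trace the endpoint the converter builds.
    IndexUpgradeInfoRequest upgradeRequest = new IndexUpgradeInfoRequest("index-v1", "index-v2");
    Request converted = MigrationRequestConverters.getMigrationAssistance(upgradeRequest);
    // converted.getEndpoint() is "_xpack/migration/assistance/index-v1,index-v2",
    // with the request's indices options carried over as URL parameters.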
@ -30,21 +30,9 @@ import org.apache.http.entity.ContentType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;

@ -76,10 +64,6 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.GetPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.SimulatePipelineRequest;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.SearchRequest;

@ -88,6 +72,7 @@ import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;

@ -106,14 +91,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
import org.elasticsearch.protocol.xpack.XPackUsageRequest;
import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateRequest;

@ -125,10 +106,8 @@ import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.EnumSet;
import java.util.Locale;
import java.util.StringJoiner;
import java.util.stream.Collectors;

final class RequestConverters {
    static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON;
@ -137,17 +116,6 @@ final class RequestConverters {
        // Contains only status utility methods
    }

    static Request cancelTasks(CancelTasksRequest cancelTasksRequest) {
        Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel");
        Params params = new Params(request);
        params.withTimeout(cancelTasksRequest.getTimeout())
            .withTaskId(cancelTasksRequest.getTaskId())
            .withNodes(cancelTasksRequest.getNodes())
            .withParentTaskId(cancelTasksRequest.getParentTaskId())
            .withActions(cancelTasksRequest.getActions());
        return request;
    }

    static Request delete(DeleteRequest deleteRequest) {
        String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id());
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
@ -718,104 +686,71 @@ final class RequestConverters {
        return request;
    }

    static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest) throws IOException {
        Request request = new Request(HttpPut.METHOD_NAME, "/_cluster/settings");
    static Request reindex(ReindexRequest reindexRequest) throws IOException {
        String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        Params params = new Params(request)
            .withRefresh(reindexRequest.isRefresh())
            .withTimeout(reindexRequest.getTimeout())
            .withWaitForActiveShards(reindexRequest.getWaitForActiveShards());

        Params parameters = new Params(request);
        parameters.withTimeout(clusterUpdateSettingsRequest.timeout());
        parameters.withMasterTimeout(clusterUpdateSettingsRequest.masterNodeTimeout());

        request.setEntity(createEntity(clusterUpdateSettingsRequest, REQUEST_BODY_CONTENT_TYPE));
        if (reindexRequest.getScrollTime() != null) {
            params.putParam("scroll", reindexRequest.getScrollTime());
        }
        request.setEntity(createEntity(reindexRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request clusterGetSettings(ClusterGetSettingsRequest clusterGetSettingsRequest) throws IOException {
        Request request = new Request(HttpGet.METHOD_NAME, "/_cluster/settings");

        Params parameters = new Params(request);
        parameters.withLocal(clusterGetSettingsRequest.local());
        parameters.withIncludeDefaults(clusterGetSettingsRequest.includeDefaults());
        parameters.withMasterTimeout(clusterGetSettingsRequest.masterNodeTimeout());

    static Request updateByQuery(UpdateByQueryRequest updateByQueryRequest) throws IOException {
        String endpoint =
            endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        Params params = new Params(request)
            .withRouting(updateByQueryRequest.getRouting())
            .withPipeline(updateByQueryRequest.getPipeline())
            .withRefresh(updateByQueryRequest.isRefresh())
            .withTimeout(updateByQueryRequest.getTimeout())
            .withWaitForActiveShards(updateByQueryRequest.getWaitForActiveShards())
            .withIndicesOptions(updateByQueryRequest.indicesOptions());
        if (updateByQueryRequest.isAbortOnVersionConflict() == false) {
            params.putParam("conflicts", "proceed");
        }
        if (updateByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) {
            params.putParam("scroll_size", Integer.toString(updateByQueryRequest.getBatchSize()));
        }
        if (updateByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
            params.putParam("scroll", updateByQueryRequest.getScrollTime());
        }
        if (updateByQueryRequest.getSize() > 0) {
            params.putParam("size", Integer.toString(updateByQueryRequest.getSize()));
        }
        request.setEntity(createEntity(updateByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getPipeline(GetPipelineRequest getPipelineRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_ingest/pipeline")
            .addCommaSeparatedPathParts(getPipelineRequest.getIds())
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout());
        return request;
    static Request deleteByQuery(DeleteByQueryRequest deleteByQueryRequest) throws IOException {
        String endpoint =
            endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        Params params = new Params(request)
            .withRouting(deleteByQueryRequest.getRouting())
            .withRefresh(deleteByQueryRequest.isRefresh())
            .withTimeout(deleteByQueryRequest.getTimeout())
            .withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards())
            .withIndicesOptions(deleteByQueryRequest.indicesOptions());
        if (deleteByQueryRequest.isAbortOnVersionConflict() == false) {
            params.putParam("conflicts", "proceed");
        }

    static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_ingest/pipeline")
            .addPathPart(putPipelineRequest.getId())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withTimeout(putPipelineRequest.timeout());
        parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout());

        request.setEntity(createEntity(putPipelineRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
        if (deleteByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) {
            params.putParam("scroll_size", Integer.toString(deleteByQueryRequest.getBatchSize()));
        }

    static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_ingest/pipeline")
            .addPathPart(deletePipelineRequest.getId())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withTimeout(deletePipelineRequest.timeout());
        parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout());

        return request;
        if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
            params.putParam("scroll", deleteByQueryRequest.getScrollTime());
        }

    static Request listTasks(ListTasksRequest listTaskRequest) {
        if (listTaskRequest.getTaskId() != null && listTaskRequest.getTaskId().isSet()) {
            throw new IllegalArgumentException("TaskId cannot be used for list tasks request");
        if (deleteByQueryRequest.getSize() > 0) {
            params.putParam("size", Integer.toString(deleteByQueryRequest.getSize()));
        }
        Request request = new Request(HttpGet.METHOD_NAME, "/_tasks");
        Params params = new Params(request);
        params.withTimeout(listTaskRequest.getTimeout())
            .withDetailed(listTaskRequest.getDetailed())
            .withWaitForCompletion(listTaskRequest.getWaitForCompletion())
            .withParentTaskId(listTaskRequest.getParentTaskId())
            .withNodes(listTaskRequest.getNodes())
            .withActions(listTaskRequest.getActions())
            .putParam("group_by", "none");
        return request;
    }

    static Request clusterHealth(ClusterHealthRequest healthRequest) {
        String[] indices = healthRequest.indices() == null ? Strings.EMPTY_ARRAY : healthRequest.indices();
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_cluster/health")
            .addCommaSeparatedPathParts(indices)
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        new Params(request)
            .withWaitForStatus(healthRequest.waitForStatus())
            .withWaitForNoRelocatingShards(healthRequest.waitForNoRelocatingShards())
            .withWaitForNoInitializingShards(healthRequest.waitForNoInitializingShards())
            .withWaitForActiveShards(healthRequest.waitForActiveShards(), ActiveShardCount.NONE)
            .withWaitForNodes(healthRequest.waitForNodes())
            .withWaitForEvents(healthRequest.waitForEvents())
            .withTimeout(healthRequest.timeout())
            .withMasterTimeout(healthRequest.masterNodeTimeout())
            .withLocal(healthRequest.local())
            .withLevel(healthRequest.level());
        request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }
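To trace the new converter's parameter mapping, a sketch (indices hypothetical; the setter names are assumed from the client documentation of this period):

    // Sketch only: request settings become URL parameters per the checks above.
    UpdateByQueryRequest ubqRequest = new UpdateByQueryRequest("source-1", "source-2");
    ubqRequest.setConflicts("proceed");   // isAbortOnVersionConflict() == false -> ?conflicts=proceed
    ubqRequest.setBatchSize(500);         // non-default scroll size -> ?scroll_size=500
    Request converted = RequestConverters.updateByQuery(ubqRequest);
    // converted.getEndpoint() is "/source-1,source-2/_update_by_query"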
@ -898,126 +833,6 @@ final class RequestConverters {
        return request;
    }

    static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) {
        String[] repositories = getRepositoriesRequest.repositories() == null ? Strings.EMPTY_ARRAY : getRepositoriesRequest.repositories();
        String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot").addCommaSeparatedPathParts(repositories).build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout());
        parameters.withLocal(getRepositoriesRequest.local());
        return request;
    }

    static Request createRepository(PutRepositoryRequest putRepositoryRequest) throws IOException {
        String endpoint = new EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout());
        parameters.withTimeout(putRepositoryRequest.timeout());
        parameters.withVerify(putRepositoryRequest.verify());

        request.setEntity(createEntity(putRepositoryRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest) {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot").addPathPart(deleteRepositoryRequest.name()).build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout());
        parameters.withTimeout(deleteRepositoryRequest.timeout());
        return request;
    }

    static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest) {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(verifyRepositoryRequest.name())
            .addPathPartAsIs("_verify")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout());
        parameters.withTimeout(verifyRepositoryRequest.timeout());
        return request;
    }

    static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
        String endpoint = new EndpointBuilder().addPathPart("_snapshot")
            .addPathPart(createSnapshotRequest.repository())
            .addPathPart(createSnapshotRequest.snapshot())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        Params params = new Params(request);
        params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout());
        params.withWaitForCompletion(createSnapshotRequest.waitForCompletion());
        request.setEntity(createEntity(createSnapshotRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) {
        EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(getSnapshotsRequest.repository());
        String endpoint;
        if (getSnapshotsRequest.snapshots().length == 0) {
            endpoint = endpointBuilder.addPathPart("_all").build();
        } else {
            endpoint = endpointBuilder.addCommaSeparatedPathParts(getSnapshotsRequest.snapshots()).build();
        }

        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout());
        parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable()));
        parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose()));

        return request;
    }

    static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(snapshotsStatusRequest.repository())
            .addCommaSeparatedPathParts(snapshotsStatusRequest.snapshots())
            .addPathPartAsIs("_status")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout());
        parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable());
        return request;
    }

    static Request restoreSnapshot(RestoreSnapshotRequest restoreSnapshotRequest) throws IOException {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(restoreSnapshotRequest.repository())
            .addPathPart(restoreSnapshotRequest.snapshot())
            .addPathPartAsIs("_restore")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        Params parameters = new Params(request);
        parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout());
        parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion());
        request.setEntity(createEntity(restoreSnapshotRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(deleteSnapshotRequest.repository())
            .addPathPart(deleteSnapshotRequest.snapshot())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout());
        return request;
    }

    static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
@ -1047,20 +862,6 @@ final class RequestConverters {
        return request;
    }

    static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException {
        EndpointBuilder builder = new EndpointBuilder().addPathPartAsIs("_ingest/pipeline");
        if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) {
            builder.addPathPart(simulatePipelineRequest.getId());
        }
        builder.addPathPartAsIs("_simulate");
        String endpoint = builder.build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        Params params = new Params(request);
        params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose()));
        request.setEntity(createEntity(simulatePipelineRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getAlias(GetAliasesRequest getAliasesRequest) {
        String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
        String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
@ -1082,6 +883,19 @@ final class RequestConverters {
        return request;
    }

    static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws IOException {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(putStoredScriptRequest.id()).build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        Params params = new Params(request);
        params.withTimeout(putStoredScriptRequest.timeout());
        params.withMasterTimeout(putStoredScriptRequest.masterNodeTimeout());
        if (Strings.hasText(putStoredScriptRequest.context())) {
            params.putParam("context", putStoredScriptRequest.context());
        }
        request.setEntity(createEntity(putStoredScriptRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request analyze(AnalyzeRequest request) throws IOException {
        EndpointBuilder builder = new EndpointBuilder();
        String index = request.index();
@ -1111,103 +925,6 @@ final class RequestConverters {
        return request;
    }

    static Request xPackInfo(XPackInfoRequest infoRequest) {
        Request request = new Request(HttpGet.METHOD_NAME, "/_xpack");
        if (false == infoRequest.isVerbose()) {
            request.addParameter("human", "false");
        }
        if (false == infoRequest.getCategories().equals(EnumSet.allOf(XPackInfoRequest.Category.class))) {
            request.addParameter("categories", infoRequest.getCategories().stream()
                .map(c -> c.toString().toLowerCase(Locale.ROOT))
                .collect(Collectors.joining(",")));
        }
        return request;
    }

    static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("watcher")
            .addPathPartAsIs("watch")
            .addPathPart(putWatchRequest.getId())
            .build();

        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        Params params = new Params(request).withVersion(putWatchRequest.getVersion());
        if (putWatchRequest.isActive() == false) {
            params.putParam("active", "false");
        }
        ContentType contentType = createContentType(putWatchRequest.xContentType());
        BytesReference source = putWatchRequest.getSource();
        request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType));
        return request;
    }

    static Request xPackWatcherDeleteWatch(DeleteWatchRequest deleteWatchRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("watcher")
            .addPathPartAsIs("watch")
            .addPathPart(deleteWatchRequest.getId())
            .build();

        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        return request;
    }

    static Request xpackUsage(XPackUsageRequest usageRequest) {
        Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage");
        Params parameters = new Params(request);
        parameters.withMasterTimeout(usageRequest.masterNodeTimeout());
        return request;
    }

    static Request putLicense(PutLicenseRequest putLicenseRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("license")
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        Params parameters = new Params(request);
        parameters.withTimeout(putLicenseRequest.timeout());
        parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout());
        if (putLicenseRequest.isAcknowledge()) {
            parameters.putParam("acknowledge", "true");
        }
        request.setJsonEntity(putLicenseRequest.getLicenseDefinition());
        return request;
    }

    static Request getLicense(GetLicenseRequest getLicenseRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("license")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        Params parameters = new Params(request);
        parameters.withLocal(getLicenseRequest.local());
        return request;
    }

    static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) {
        Request request = new Request(HttpDelete.METHOD_NAME, "/_xpack/license");
        Params parameters = new Params(request);
        parameters.withTimeout(deleteLicenseRequest.timeout());
        parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout());
        return request;
    }

    static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
        EndpointBuilder endpointBuilder = new EndpointBuilder()
            .addPathPartAsIs("_xpack/migration/assistance")
            .addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices());
        String endpoint = endpointBuilder.build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        Params parameters = new Params(request);
        parameters.withIndicesOptions(indexUpgradeInfoRequest.indicesOptions());
        return request;
    }

    static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
        BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
        return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
@ -1329,11 +1046,16 @@ final class RequestConverters {

        Params withRefresh(boolean refresh) {
            if (refresh) {
                return withRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
                return withRefreshPolicy(RefreshPolicy.IMMEDIATE);
            }
            return this;
        }

        /**
         * @deprecated If creating a new HLRC ReST API call, use {@link RefreshPolicy}
         * instead of {@link WriteRequest.RefreshPolicy} from the server project
         */
        @Deprecated
        Params withRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) {
            if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) {
                return putParam("refresh", refreshPolicy.getValue());

@ -1341,6 +1063,13 @@ final class RequestConverters {
            return this;
        }

        Params withRefreshPolicy(RefreshPolicy refreshPolicy) {
            if (refreshPolicy != RefreshPolicy.NONE) {
                return putParam("refresh", refreshPolicy.getValue());
            }
            return this;
        }

        Params withRetryOnConflict(int retryOnConflict) {
            if (retryOnConflict > 0) {
                return putParam("retry_on_conflict", String.valueOf(retryOnConflict));
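The deprecation above steers new HLRC code from the server-side enum to the client-side one; a sketch of the two overloads side by side (both end up as the same ?refresh= URL parameter):

    // Sketch only, inside RequestConverters where `params` is a Params instance:
    params.withRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);   // deprecated server-side enum
    params.withRefreshPolicy(RefreshPolicy.WAIT_UNTIL);                // preferred client-side enum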
@@ -19,7 +19,6 @@

package org.elasticsearch.client;

import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;

@@ -29,6 +28,7 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;

@@ -65,6 +65,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.index.rankeval.RankEvalResponse;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestStatus;

@@ -117,38 +121,38 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.avg.ParsedAvg;
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.cardinality.ParsedCardinality;
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.geobounds.ParsedGeoBounds;
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.geocentroid.ParsedGeoCentroid;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.ParsedMax;
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.min.ParsedMin;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentiles;
import org.elasticsearch.search.aggregations.metrics.scripted.ParsedScriptedMetric;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats;
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtendedStats;
import org.elasticsearch.search.aggregations.metrics.sum.ParsedSum;
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.tophits.ParsedTopHits;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.valuecount.ParsedValueCount;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles;
import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ParsedAvg;
import org.elasticsearch.search.aggregations.metrics.ParsedCardinality;
import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats;
import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds;
import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid;
import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles;
import org.elasticsearch.search.aggregations.metrics.ParsedMax;
import org.elasticsearch.search.aggregations.metrics.ParsedMin;
import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric;
import org.elasticsearch.search.aggregations.metrics.ParsedStats;
import org.elasticsearch.search.aggregations.metrics.ParsedSum;
import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles;
import org.elasticsearch.search.aggregations.metrics.ParsedTopHits;
import org.elasticsearch.search.aggregations.metrics.ParsedValueCount;
import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;

@@ -177,6 +181,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.function.Function;

@@ -209,9 +214,11 @@ public class RestHighLevelClient implements Closeable {
    private final TasksClient tasksClient = new TasksClient(this);
    private final XPackClient xPackClient = new XPackClient(this);
    private final WatcherClient watcherClient = new WatcherClient(this);
    private final GraphClient graphClient = new GraphClient(this);
    private final LicenseClient licenseClient = new LicenseClient(this);
    private final MigrationClient migrationClient = new MigrationClient(this);
    private final MachineLearningClient machineLearningClient = new MachineLearningClient(this);
    private final SecurityClient securityClient = new SecurityClient(this);

    /**
     * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the

@@ -325,6 +332,16 @@ public class RestHighLevelClient implements Closeable {
     */
    public WatcherClient watcher() { return watcherClient; }

    /**
     * Provides methods for accessing the Elastic Licensed Graph explore API that
     * is shipped with the default distribution of Elasticsearch. All of
     * these APIs will 404 if run against the OSS distribution of Elasticsearch.
     * <p>
     * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html">
     * Graph API on elastic.co</a> for more information.
     */
    public GraphClient graph() { return graphClient; }

    /**
     * Provides methods for accessing the Elastic Licensed Licensing APIs that
     * are shipped with the default distribution of Elasticsearch. All of

@@ -361,6 +378,20 @@ public class RestHighLevelClient implements Closeable {
        return machineLearningClient;
    }

    /**
     * Provides methods for accessing the Elastic Licensed Security APIs that
     * are shipped with the Elastic Stack distribution of Elasticsearch. All of
     * these APIs will 404 if run against the OSS distribution of Elasticsearch.
     * <p>
     * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api.html">
     * Security APIs on elastic.co</a> for more information.
     *
     * @return the client wrapper for making Security API calls
     */
    public SecurityClient security() {
        return securityClient;
    }

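For orientation, a minimal sketch of wiring up the client and reaching these sub-clients, assuming an Elasticsearch node on localhost:9200:

    RestHighLevelClient client = new RestHighLevelClient(
        RestClient.builder(new HttpHost("localhost", 9200, "http")));
    SecurityClient security = client.security();   // Elastic licensed APIs
    GraphClient graph = client.graph();
    SnapshotClient snapshot = client.snapshot();   // OSS APIs are exposed the same way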
    /**
     * Executes a bulk request using the Bulk API.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html">Bulk API on elastic.co</a>

@@ -384,6 +415,91 @@ public class RestHighLevelClient implements Closeable {
        performRequestAsyncAndParseEntity(bulkRequest, RequestConverters::bulk, options, BulkResponse::fromXContent, listener, emptySet());
    }

    /**
     * Executes a reindex request.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html">Reindex API on elastic.co</a>
     * @param reindexRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public final BulkByScrollResponse reindex(ReindexRequest reindexRequest, RequestOptions options) throws IOException {
        return performRequestAndParseEntity(
            reindexRequest, RequestConverters::reindex, options, BulkByScrollResponse::fromXContent, emptySet()
        );
    }

    /**
     * Asynchronously executes a reindex request.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html">Reindex API on elastic.co</a>
     * @param reindexRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public final void reindexAsync(ReindexRequest reindexRequest, RequestOptions options, ActionListener<BulkByScrollResponse> listener) {
        performRequestAsyncAndParseEntity(
            reindexRequest, RequestConverters::reindex, options, BulkByScrollResponse::fromXContent, listener, emptySet()
        );
    }

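A brief synchronous usage sketch for the reindex API above, assuming a configured RestHighLevelClient named client and hypothetical indices source and dest:

    // Copy every document from "source" into "dest" and wait for the response.
    ReindexRequest reindexRequest = new ReindexRequest();
    reindexRequest.setSourceIndices("source");
    reindexRequest.setDestIndex("dest");
    BulkByScrollResponse response = client.reindex(reindexRequest, RequestOptions.DEFAULT);
    long createdDocs = response.getCreated(); // documents created in "dest"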
    /**
     * Executes an update by query request.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update-by-query.html">
     * Update By Query API on elastic.co</a>
     * @param updateByQueryRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public final BulkByScrollResponse updateByQuery(UpdateByQueryRequest updateByQueryRequest, RequestOptions options) throws IOException {
        return performRequestAndParseEntity(
            updateByQueryRequest, RequestConverters::updateByQuery, options, BulkByScrollResponse::fromXContent, emptySet()
        );
    }

    /**
     * Asynchronously executes an update by query request.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update-by-query.html">
     * Update By Query API on elastic.co</a>
     * @param updateByQueryRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public final void updateByQueryAsync(UpdateByQueryRequest updateByQueryRequest, RequestOptions options,
            ActionListener<BulkByScrollResponse> listener) {
        performRequestAsyncAndParseEntity(
            updateByQueryRequest, RequestConverters::updateByQuery, options, BulkByScrollResponse::fromXContent, listener, emptySet()
        );
    }

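A similar sketch for update by query, under the same client assumption; wrapping a SearchRequest is one way to target a hypothetical index here:

    // Re-save every document matched by the wrapped search, e.g. to pick up mapping changes.
    UpdateByQueryRequest updateRequest = new UpdateByQueryRequest(new SearchRequest("source"));
    BulkByScrollResponse response = client.updateByQuery(updateRequest, RequestOptions.DEFAULT);
    long updatedDocs = response.getUpdated();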
    /**
     * Executes a delete by query request.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html">
     * Delete By Query API on elastic.co</a>
     * @param deleteByQueryRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public final BulkByScrollResponse deleteByQuery(DeleteByQueryRequest deleteByQueryRequest, RequestOptions options) throws IOException {
        return performRequestAndParseEntity(
            deleteByQueryRequest, RequestConverters::deleteByQuery, options, BulkByScrollResponse::fromXContent, emptySet()
        );
    }

    /**
     * Asynchronously executes a delete by query request.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html">
     * Delete By Query API on elastic.co</a>
     * @param deleteByQueryRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public final void deleteByQueryAsync(DeleteByQueryRequest deleteByQueryRequest, RequestOptions options,
            ActionListener<BulkByScrollResponse> listener) {
        performRequestAsyncAndParseEntity(
            deleteByQueryRequest, RequestConverters::deleteByQuery, options, BulkByScrollResponse::fromXContent, listener, emptySet()
        );
    }

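And the asynchronous calling pattern, sketched for delete by query with a hypothetical stale-logs index:

    DeleteByQueryRequest deleteRequest = new DeleteByQueryRequest(new SearchRequest("stale-logs"));
    client.deleteByQueryAsync(deleteRequest, RequestOptions.DEFAULT, new ActionListener<BulkByScrollResponse>() {
        @Override
        public void onResponse(BulkByScrollResponse response) {
            long deletedDocs = response.getDeleted(); // handle success
        }

        @Override
        public void onFailure(Exception e) {
            // handle failure, e.g. log and retry
        }
    });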
    /**
     * Pings the remote Elasticsearch cluster and returns true if the ping succeeded, false otherwise
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized

@@ -935,6 +1051,35 @@ public class RestHighLevelClient implements Closeable {
            AcknowledgedResponse::fromXContent, listener, emptySet());
    }

    /**
     * Puts a stored script using the Scripting API.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html"> Scripting API
     * on elastic.co</a>
     * @param putStoredScriptRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse putScript(PutStoredScriptRequest putStoredScriptRequest,
            RequestOptions options) throws IOException {
        return performRequestAndParseEntity(putStoredScriptRequest, RequestConverters::putScript, options,
            AcknowledgedResponse::fromXContent, emptySet());
    }

    /**
     * Asynchronously puts a stored script using the Scripting API.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html"> Scripting API
     * on elastic.co</a>
     * @param putStoredScriptRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void putScriptAsync(PutStoredScriptRequest putStoredScriptRequest, RequestOptions options,
            ActionListener<AcknowledgedResponse> listener) {
        performRequestAsyncAndParseEntity(putStoredScriptRequest, RequestConverters::putScript, options,
            AcknowledgedResponse::fromXContent, listener, emptySet());
    }

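A short sketch of storing a script through putScript; the script id and Painless source are assumptions, and client is again a configured RestHighLevelClient:

    PutStoredScriptRequest scriptRequest = new PutStoredScriptRequest()
        .id("double-score") // hypothetical script id
        .content(new BytesArray(
            "{\"script\": {\"lang\": \"painless\", \"source\": \"_score * 2\"}}"), XContentType.JSON);
    AcknowledgedResponse ack = client.putScript(scriptRequest, RequestOptions.DEFAULT);
    boolean stored = ack.isAcknowledged();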
    /**
     * Asynchronously executes a request using the Field Capabilities API.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-field-caps.html">Field Capabilities API

@@ -949,6 +1094,11 @@ public class RestHighLevelClient implements Closeable {
            FieldCapabilitiesResponse::fromXContent, listener, emptySet());
    }

    /**
     * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
     * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
     */
    @Deprecated
    protected final <Req extends ActionRequest, Resp> Resp performRequestAndParseEntity(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,

@@ -958,15 +1108,58 @@ public class RestHighLevelClient implements Closeable {
            response -> parseEntity(response.getEntity(), entityParser), ignores);
    }

    /**
     * Defines a helper method for performing a request and then parsing the returned entity using the provided entityParser.
     */
    protected final <Req extends Validatable, Resp> Resp performRequestAndParseEntity(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,
            CheckedFunction<XContentParser, Resp, IOException> entityParser,
            Set<Integer> ignores) throws IOException {
        return performRequest(request, requestConverter, options,
            response -> parseEntity(response.getEntity(), entityParser), ignores);
    }

    /**
     * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
     * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
     */
    @Deprecated
    protected final <Req extends ActionRequest, Resp> Resp performRequest(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,
            CheckedFunction<Response, Resp, IOException> responseConverter,
            Set<Integer> ignores) throws IOException {
        ActionRequestValidationException validationException = request.validate();
        if (validationException != null) {
        if (validationException != null && validationException.validationErrors().isEmpty() == false) {
            throw validationException;
        }
        return internalPerformRequest(request, requestConverter, options, responseConverter, ignores);
    }

    /**
     * Defines a helper method for performing a request.
     */
    protected final <Req extends Validatable, Resp> Resp performRequest(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,
            CheckedFunction<Response, Resp, IOException> responseConverter,
            Set<Integer> ignores) throws IOException {
        Optional<ValidationException> validationException = request.validate();
        if (validationException != null && validationException.isPresent()) {
            throw validationException.get();
        }
        return internalPerformRequest(request, requestConverter, options, responseConverter, ignores);
    }

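To make the Validatable contract concrete, a minimal sketch of a hypothetical request class that opts into this client-side validation layer (using java.util.Optional and org.elasticsearch.common.ValidationException):

    public class ExampleRequest implements Validatable { // hypothetical request class
        private final String name;

        public ExampleRequest(String name) {
            this.name = name;
        }

        @Override
        public Optional<ValidationException> validate() {
            if (name == null || name.isEmpty()) {
                ValidationException e = new ValidationException();
                e.addValidationError("name must not be null or empty");
                return Optional.of(e); // performRequest(...) above throws this before any HTTP call
            }
            return Optional.empty();
        }
    }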
    /**
     * Provides common functionality for performing a request.
     */
    private <Req, Resp> Resp internalPerformRequest(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,
            CheckedFunction<Response, Resp, IOException> responseConverter,
            Set<Integer> ignores) throws IOException {
        Request req = requestConverter.apply(request);
        req.setOptions(options);
        Response response;

@@ -994,6 +1187,11 @@ public class RestHighLevelClient implements Closeable {
        }
    }

    /**
     * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
     * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
     */
    @Deprecated
    protected final <Req extends ActionRequest, Resp> void performRequestAsyncAndParseEntity(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,

@@ -1003,16 +1201,61 @@ public class RestHighLevelClient implements Closeable {
            response -> parseEntity(response.getEntity(), entityParser), listener, ignores);
    }

    /**
     * Defines a helper method for asynchronously performing a request and then parsing the returned entity using the provided
     * entityParser.
     */
    protected final <Req extends Validatable, Resp> void performRequestAsyncAndParseEntity(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,
            CheckedFunction<XContentParser, Resp, IOException> entityParser,
            ActionListener<Resp> listener, Set<Integer> ignores) {
        performRequestAsync(request, requestConverter, options,
            response -> parseEntity(response.getEntity(), entityParser), listener, ignores);
    }

    /**
     * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
     * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
     */
    @Deprecated
    protected final <Req extends ActionRequest, Resp> void performRequestAsync(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,
            CheckedFunction<Response, Resp, IOException> responseConverter,
            ActionListener<Resp> listener, Set<Integer> ignores) {
        ActionRequestValidationException validationException = request.validate();
        if (validationException != null) {
        if (validationException != null && validationException.validationErrors().isEmpty() == false) {
            listener.onFailure(validationException);
            return;
        }
        internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores);
    }

    /**
     * Defines a helper method for asynchronously performing a request.
     */
    protected final <Req extends Validatable, Resp> void performRequestAsync(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,
            CheckedFunction<Response, Resp, IOException> responseConverter,
            ActionListener<Resp> listener, Set<Integer> ignores) {
        Optional<ValidationException> validationException = request.validate();
        if (validationException != null && validationException.isPresent()) {
            listener.onFailure(validationException.get());
            return;
        }
        internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores);
    }

    /**
     * Provides common functionality for asynchronously performing a request.
     */
    private <Req, Resp> void internalPerformRequestAsync(Req request,
            CheckedFunction<Req, Request, IOException> requestConverter,
            RequestOptions options,
            CheckedFunction<Response, Resp, IOException> responseConverter,
            ActionListener<Resp> listener, Set<Integer> ignores) {
        Request req;
        try {
            req = requestConverter.apply(request);

@@ -1026,6 +1269,7 @@ public class RestHighLevelClient implements Closeable {
        client.performRequestAsync(req, responseListener);
    }

    final <Resp> ResponseListener wrapResponseListener(CheckedFunction<Response, Resp, IOException> responseConverter,
            ActionListener<Resp> actionListener, Set<Integer> ignores) {
        return new ResponseListener() {

@@ -1108,15 +1352,6 @@ public class RestHighLevelClient implements Closeable {
        }
    }

    private static RequestOptions optionsForHeaders(Header[] headers) {
        RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
        for (Header header : headers) {
            Objects.requireNonNull(header, "header cannot be null");
            options.addHeader(header.getName(), header.getValue());
        }
        return options.build();
    }

    static boolean convertExistsResponse(Response response) {
        return response.getStatusLine().getStatusCode() == 200;
    }
@@ -0,0 +1,128 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.PutUserRequest;
import org.elasticsearch.client.security.PutUserResponse;
import org.elasticsearch.client.security.EmptyResponse;

import java.io.IOException;

import static java.util.Collections.emptySet;

/**
 * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Security APIs.
 * <p>
 * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api.html">Security APIs on elastic.co</a>
 */
public final class SecurityClient {

    private final RestHighLevelClient restHighLevelClient;

    SecurityClient(RestHighLevelClient restHighLevelClient) {
        this.restHighLevelClient = restHighLevelClient;
    }

    /**
     * Create/update a user in the native realm synchronously.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html">
     * the docs</a> for more.
     * @param request the request with the user's information
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response from the put user call
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public PutUserResponse putUser(PutUserRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::putUser, options,
            PutUserResponse::fromXContent, emptySet());
    }

    /**
     * Asynchronously create/update a user in the native realm.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html">
     * the docs</a> for more.
     * @param request the request with the user's information
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void putUserAsync(PutUserRequest request, RequestOptions options, ActionListener<PutUserResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::putUser, options,
            PutUserResponse::fromXContent, listener, emptySet());
    }

    /**
     * Enable a native realm or built-in user synchronously.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html">
     * the docs</a> for more.
     * @param request the request with the user to enable
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response from the enable user call
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public EmptyResponse enableUser(EnableUserRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::enableUser, options,
            EmptyResponse::fromXContent, emptySet());
    }

    /**
     * Enable a native realm or built-in user asynchronously.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html">
     * the docs</a> for more.
     * @param request the request with the user to enable
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void enableUserAsync(EnableUserRequest request, RequestOptions options,
            ActionListener<EmptyResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::enableUser, options,
            EmptyResponse::fromXContent, listener, emptySet());
    }

    /**
     * Disable a native realm or built-in user synchronously.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-disable-user.html">
     * the docs</a> for more.
     * @param request the request with the user to disable
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response from the disable user call
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public EmptyResponse disableUser(DisableUserRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::disableUser, options,
            EmptyResponse::fromXContent, emptySet());
    }

    /**
     * Disable a native realm or built-in user asynchronously.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-disable-user.html">
     * the docs</a> for more.
     * @param request the request with the user to disable
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void disableUserAsync(DisableUserRequest request, RequestOptions options,
            ActionListener<EmptyResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::disableUser, options,
            EmptyResponse::fromXContent, listener, emptySet());
    }
}
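A compact usage sketch for SecurityClient; the request objects are assumed to be built elsewhere (their constructor arguments are elided here), and client is a configured RestHighLevelClient:

    // Create or update a user, then flip its enabled flag.
    PutUserResponse putResponse = client.security().putUser(putUserRequest, RequestOptions.DEFAULT);
    EmptyResponse disabled = client.security().disableUser(disableUserRequest, RequestOptions.DEFAULT);
    EmptyResponse enabled = client.security().enableUser(enableUserRequest, RequestOptions.DEFAULT);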
@@ -0,0 +1,68 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.PutUserRequest;
import org.elasticsearch.client.security.SetUserEnabledRequest;

import java.io.IOException;

import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;

final class SecurityRequestConverters {

    private SecurityRequestConverters() {}

    static Request putUser(PutUserRequest putUserRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack/security/user")
            .addPathPart(putUserRequest.getUsername())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        request.setEntity(createEntity(putUserRequest, REQUEST_BODY_CONTENT_TYPE));
        RequestConverters.Params params = new RequestConverters.Params(request);
        params.withRefreshPolicy(putUserRequest.getRefreshPolicy());
        return request;
    }

    static Request enableUser(EnableUserRequest enableUserRequest) {
        return setUserEnabled(enableUserRequest);
    }

    static Request disableUser(DisableUserRequest disableUserRequest) {
        return setUserEnabled(disableUserRequest);
    }

    private static Request setUserEnabled(SetUserEnabledRequest setUserEnabledRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack/security/user")
            .addPathPart(setUserEnabledRequest.getUsername())
            .addPathPart(setUserEnabledRequest.isEnabled() ? "_enable" : "_disable")
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params(request);
        params.withRefreshPolicy(setUserEnabledRequest.getRefreshPolicy());
        return request;
    }
}
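For reference, the routes these converters produce; the username jdoe is hypothetical:

    // putUser     -> PUT /_xpack/security/user/jdoe           (JSON body, plus ?refresh=... when set)
    // enableUser  -> PUT /_xpack/security/user/jdoe/_enable
    // disableUser -> PUT /_xpack/security/user/jdoe/_disable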
@@ -65,7 +65,7 @@ public final class SnapshotClient {
     */
    public GetRepositoriesResponse getRepository(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options)
            throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options,
        return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, SnapshotRequestConverters::getRepositories, options,
            GetRepositoriesResponse::fromXContent, emptySet());
    }

@@ -80,7 +80,7 @@ public final class SnapshotClient {
     */
    public void getRepositoryAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options,
            ActionListener<GetRepositoriesResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, SnapshotRequestConverters::getRepositories, options,
            GetRepositoriesResponse::fromXContent, listener, emptySet());
    }

@@ -94,7 +94,7 @@ public final class SnapshotClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse createRepository(PutRepositoryRequest putRepositoryRequest, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(putRepositoryRequest, RequestConverters::createRepository, options,
        return restHighLevelClient.performRequestAndParseEntity(putRepositoryRequest, SnapshotRequestConverters::createRepository, options,
            AcknowledgedResponse::fromXContent, emptySet());
    }

@@ -108,7 +108,7 @@ public final class SnapshotClient {
     */
    public void createRepositoryAsync(PutRepositoryRequest putRepositoryRequest, RequestOptions options,
            ActionListener<AcknowledgedResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(putRepositoryRequest, RequestConverters::createRepository, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(putRepositoryRequest, SnapshotRequestConverters::createRepository, options,
            AcknowledgedResponse::fromXContent, listener, emptySet());
    }

@@ -123,8 +123,8 @@ public final class SnapshotClient {
     */
    public AcknowledgedResponse deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options)
            throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(deleteRepositoryRequest, RequestConverters::deleteRepository, options,
            AcknowledgedResponse::fromXContent, emptySet());
        return restHighLevelClient.performRequestAndParseEntity(deleteRepositoryRequest, SnapshotRequestConverters::deleteRepository,
            options, AcknowledgedResponse::fromXContent, emptySet());
    }

    /**

@@ -137,7 +137,7 @@ public final class SnapshotClient {
     */
    public void deleteRepositoryAsync(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options,
            ActionListener<AcknowledgedResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(deleteRepositoryRequest, RequestConverters::deleteRepository, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(deleteRepositoryRequest, SnapshotRequestConverters::deleteRepository, options,
            AcknowledgedResponse::fromXContent, listener, emptySet());
    }

@@ -152,8 +152,8 @@ public final class SnapshotClient {
     */
    public VerifyRepositoryResponse verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest, RequestOptions options)
            throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(verifyRepositoryRequest, RequestConverters::verifyRepository, options,
            VerifyRepositoryResponse::fromXContent, emptySet());
        return restHighLevelClient.performRequestAndParseEntity(verifyRepositoryRequest, SnapshotRequestConverters::verifyRepository,
            options, VerifyRepositoryResponse::fromXContent, emptySet());
    }

    /**

@@ -166,7 +166,7 @@ public final class SnapshotClient {
     */
    public void verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryRequest, RequestOptions options,
            ActionListener<VerifyRepositoryResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(verifyRepositoryRequest, RequestConverters::verifyRepository, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(verifyRepositoryRequest, SnapshotRequestConverters::verifyRepository, options,
            VerifyRepositoryResponse::fromXContent, listener, emptySet());
    }

@@ -178,7 +178,7 @@ public final class SnapshotClient {
     */
    public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest, RequestOptions options)
            throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
        return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, SnapshotRequestConverters::createSnapshot, options,
            CreateSnapshotResponse::fromXContent, emptySet());
    }

@@ -190,7 +190,7 @@ public final class SnapshotClient {
     */
    public void createAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options,
            ActionListener<CreateSnapshotResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, SnapshotRequestConverters::createSnapshot, options,
            CreateSnapshotResponse::fromXContent, listener, emptySet());
    }

@@ -205,7 +205,7 @@ public final class SnapshotClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public GetSnapshotsResponse get(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options,
        return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, SnapshotRequestConverters::getSnapshots, options,
            GetSnapshotsResponse::fromXContent, emptySet());
    }

@@ -219,7 +219,7 @@ public final class SnapshotClient {
     * @param listener the listener to be notified upon request completion
     */
    public void getAsync(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options, ActionListener<GetSnapshotsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest, SnapshotRequestConverters::getSnapshots, options,
            GetSnapshotsResponse::fromXContent, listener, emptySet());
    }

@@ -234,7 +234,7 @@ public final class SnapshotClient {
     */
    public SnapshotsStatusResponse status(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options)
            throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options,
        return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, SnapshotRequestConverters::snapshotsStatus, options,
            SnapshotsStatusResponse::fromXContent, emptySet());
    }

@@ -248,7 +248,7 @@ public final class SnapshotClient {
     */
    public void statusAsync(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options,
            ActionListener<SnapshotsStatusResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest, SnapshotRequestConverters::snapshotsStatus, options,
            SnapshotsStatusResponse::fromXContent, listener, emptySet());
    }

@@ -263,7 +263,7 @@ public final class SnapshotClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public RestoreSnapshotResponse restore(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(restoreSnapshotRequest, RequestConverters::restoreSnapshot, options,
        return restHighLevelClient.performRequestAndParseEntity(restoreSnapshotRequest, SnapshotRequestConverters::restoreSnapshot, options,
            RestoreSnapshotResponse::fromXContent, emptySet());
    }

@@ -278,7 +278,7 @@ public final class SnapshotClient {
     */
    public void restoreAsync(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options,
            ActionListener<RestoreSnapshotResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(restoreSnapshotRequest, RequestConverters::restoreSnapshot, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(restoreSnapshotRequest, SnapshotRequestConverters::restoreSnapshot, options,
            RestoreSnapshotResponse::fromXContent, listener, emptySet());
    }

@@ -293,7 +293,7 @@ public final class SnapshotClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse delete(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options,
        return restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest, SnapshotRequestConverters::deleteSnapshot, options,
            AcknowledgedResponse::fromXContent, emptySet());
    }

@@ -308,7 +308,7 @@ public final class SnapshotClient {
     */
    public void deleteAsync(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options,
            ActionListener<AcknowledgedResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest, SnapshotRequestConverters::deleteSnapshot, options,
            AcknowledgedResponse::fromXContent, listener, emptySet());
    }
}
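An end-to-end sketch of the snapshot workflow through this client; the repository name, filesystem location, and snapshot name are assumptions, and client is a configured RestHighLevelClient:

    // Register a shared-filesystem repository, then take a snapshot and wait for it.
    PutRepositoryRequest repoRequest = new PutRepositoryRequest("backups");
    repoRequest.type("fs");
    repoRequest.settings(Settings.builder().put("location", "/mount/backups").build());
    client.snapshot().createRepository(repoRequest, RequestOptions.DEFAULT);

    CreateSnapshotRequest snapshotRequest = new CreateSnapshotRequest("backups", "snapshot_1");
    snapshotRequest.waitForCompletion(true);
    CreateSnapshotResponse created = client.snapshot().create(snapshotRequest, RequestOptions.DEFAULT);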
@@ -0,0 +1,162 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.common.Strings;

import java.io.IOException;

public class SnapshotRequestConverters {

    static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) {
        String[] repositories = getRepositoriesRequest.repositories() == null ? Strings.EMPTY_ARRAY : getRepositoriesRequest.repositories();
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot").addCommaSeparatedPathParts(repositories)
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout());
        parameters.withLocal(getRepositoriesRequest.local());
        return request;
    }

    static Request createRepository(PutRepositoryRequest putRepositoryRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout());
        parameters.withTimeout(putRepositoryRequest.timeout());
        parameters.withVerify(putRepositoryRequest.verify());

        request.setEntity(RequestConverters.createEntity(putRepositoryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest) {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot").addPathPart(deleteRepositoryRequest.name())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout());
        parameters.withTimeout(deleteRepositoryRequest.timeout());
        return request;
    }

    static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest) {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(verifyRepositoryRequest.name())
            .addPathPartAsIs("_verify")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout());
        parameters.withTimeout(verifyRepositoryRequest.timeout());
        return request;
    }

    static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot")
            .addPathPart(createSnapshotRequest.repository())
            .addPathPart(createSnapshotRequest.snapshot())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params(request);
        params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout());
        params.withWaitForCompletion(createSnapshotRequest.waitForCompletion());
        request.setEntity(RequestConverters.createEntity(createSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) {
        RequestConverters.EndpointBuilder endpointBuilder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(getSnapshotsRequest.repository());
        String endpoint;
        if (getSnapshotsRequest.snapshots().length == 0) {
            endpoint = endpointBuilder.addPathPart("_all").build();
        } else {
            endpoint = endpointBuilder.addCommaSeparatedPathParts(getSnapshotsRequest.snapshots()).build();
        }

        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout());
        parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable()));
        parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose()));

        return request;
    }

    static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(snapshotsStatusRequest.repository())
            .addCommaSeparatedPathParts(snapshotsStatusRequest.snapshots())
            .addPathPartAsIs("_status")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout());
        parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable());
        return request;
    }

    static Request restoreSnapshot(RestoreSnapshotRequest restoreSnapshotRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(restoreSnapshotRequest.repository())
            .addPathPart(restoreSnapshotRequest.snapshot())
            .addPathPartAsIs("_restore")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout());
        parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion());
        request.setEntity(RequestConverters.createEntity(restoreSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(deleteSnapshotRequest.repository())
            .addPathPart(deleteSnapshotRequest.snapshot())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout());
        return request;
    }
}
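For reference, the routes these converters produce; backups and snap_1 are hypothetical names:

    // getRepositories  -> GET    /_snapshot/{comma-separated repos}
    // createRepository -> PUT    /_snapshot/backups                 (JSON body)
    // deleteRepository -> DELETE /_snapshot/backups
    // verifyRepository -> POST   /_snapshot/backups/_verify
    // createSnapshot   -> PUT    /_snapshot/backups/snap_1          (JSON body)
    // getSnapshots     -> GET    /_snapshot/backups/snap_1          (or /_all when none given)
    // snapshotsStatus  -> GET    /_snapshot/backups/snap_1/_status
    // restoreSnapshot  -> POST   /_snapshot/backups/snap_1/_restore (JSON body)
    // deleteSnapshot   -> DELETE /_snapshot/backups/snap_1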
@@ -51,7 +51,7 @@ public final class TasksClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public ListTasksResponse list(ListTasksRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::listTasks, options,
        return restHighLevelClient.performRequestAndParseEntity(request, TasksRequestConverters::listTasks, options,
            ListTasksResponse::fromXContent, emptySet());
    }

@@ -64,7 +64,7 @@ public final class TasksClient {
     * @param listener the listener to be notified upon request completion
     */
    public void listAsync(ListTasksRequest request, RequestOptions options, ActionListener<ListTasksResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::listTasks, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(request, TasksRequestConverters::listTasks, options,
            ListTasksResponse::fromXContent, listener, emptySet());
    }

@ -82,7 +82,7 @@ public final class TasksClient {
|
|||
public CancelTasksResponse cancel(CancelTasksRequest cancelTasksRequest, RequestOptions options ) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(
|
||||
cancelTasksRequest,
|
||||
RequestConverters::cancelTasks,
|
||||
TasksRequestConverters::cancelTasks,
|
||||
options,
|
||||
CancelTasksResponse::fromXContent,
|
||||
emptySet()
|
||||
|
@ -101,7 +101,7 @@ public final class TasksClient {
|
|||
public void cancelAsync(CancelTasksRequest cancelTasksRequest, RequestOptions options, ActionListener<CancelTasksResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(
|
||||
cancelTasksRequest,
|
||||
RequestConverters::cancelTasks,
|
||||
TasksRequestConverters::cancelTasks,
|
||||
options,
|
||||
CancelTasksResponse::fromXContent,
|
||||
listener,
|
||||
|
|
|
@@ -0,0 +1,55 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;

public class TasksRequestConverters {

    static Request cancelTasks(CancelTasksRequest cancelTasksRequest) {
        Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel");
        RequestConverters.Params params = new RequestConverters.Params(request);
        params.withTimeout(cancelTasksRequest.getTimeout())
            .withTaskId(cancelTasksRequest.getTaskId())
            .withNodes(cancelTasksRequest.getNodes())
            .withParentTaskId(cancelTasksRequest.getParentTaskId())
            .withActions(cancelTasksRequest.getActions());
        return request;
    }

    static Request listTasks(ListTasksRequest listTaskRequest) {
        if (listTaskRequest.getTaskId() != null && listTaskRequest.getTaskId().isSet()) {
            throw new IllegalArgumentException("TaskId cannot be used for list tasks request");
        }
        Request request = new Request(HttpGet.METHOD_NAME, "/_tasks");
        RequestConverters.Params params = new RequestConverters.Params(request);
        params.withTimeout(listTaskRequest.getTimeout())
            .withDetailed(listTaskRequest.getDetailed())
            .withWaitForCompletion(listTaskRequest.getWaitForCompletion())
            .withParentTaskId(listTaskRequest.getParentTaskId())
            .withNodes(listTaskRequest.getNodes())
            .withActions(listTaskRequest.getActions())
            .putParam("group_by", "none");
        return request;
    }
}
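Note: a quick sketch of the `listTasks` contract above, assuming the server-side `ListTasksRequest` and `TaskId` setters; node and task ids are made up:

// Illustration only: group_by=none is always forced, and per-task ids are rejected.
ListTasksRequest listRequest = new ListTasksRequest();
listRequest.setDetailed(true);
Request ok = TasksRequestConverters.listTasks(listRequest);   // GET /_tasks?detailed=true&group_by=none

listRequest.setTaskId(new TaskId("node-1", 42));
// TasksRequestConverters.listTasks(listRequest) now throws IllegalArgumentException,
// because a single-task lookup does not belong on the list endpoint.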
@@ -0,0 +1,56 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client;

import org.elasticsearch.common.unit.TimeValue;

/**
 * A base request for any requests that supply timeouts.
 *
 * Please note, any requests that use an ackTimeout should set timeout as they
 * represent the same backing field on the server.
 */
public class TimedRequest implements Validatable {

    private TimeValue timeout;
    private TimeValue masterTimeout;

    public void setTimeout(TimeValue timeout) {
        this.timeout = timeout;
    }

    public void setMasterTimeout(TimeValue masterTimeout) {
        this.masterTimeout = masterTimeout;
    }

    /**
     * Returns the request timeout
     */
    public TimeValue timeout() {
        return timeout;
    }

    /**
     * Returns the timeout for the request to be completed on the master node
     */
    public TimeValue masterNodeTimeout() {
        return masterTimeout;
    }
}
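Note: a minimal sketch of how a concrete client request might build on `TimedRequest`; the subclass is hypothetical:

// Hypothetical subclass: gains the timeout plumbing and the no-op validate() from Validatable.
class DemoTimedRequest extends TimedRequest {
    // request-specific fields would live here; converters read timeout()
    // and masterNodeTimeout() when building query parameters.
}

DemoTimedRequest timed = new DemoTimedRequest();
timed.setTimeout(TimeValue.timeValueSeconds(30));
timed.setMasterTimeout(TimeValue.timeValueSeconds(10));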
@@ -0,0 +1,37 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client;

import java.util.Optional;

/**
 * Defines a validation layer for Requests.
 */
public interface Validatable {
    /**
     * Perform validation. This method does not have to be overridden in the event that no validation needs to be done,
     * or the validation was done during object construction time. A {@link ValidationException} that is not null is
     * assumed to contain validation errors and will be thrown.
     *
     * @return An {@link Optional} {@link ValidationException} that contains a list of validation errors.
     */
    default Optional<ValidationException> validate() {
        return Optional.empty();
    }
}
@@ -0,0 +1,55 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client;

import java.util.ArrayList;
import java.util.List;

/**
 * Encapsulates an accumulation of validation errors
 */
public class ValidationException extends IllegalArgumentException {
    private final List<String> validationErrors = new ArrayList<>();

    /**
     * Add a new validation error to the accumulating validation errors
     * @param error the error to add
     */
    public void addValidationError(String error) {
        validationErrors.add(error);
    }

    /**
     * Returns the validation errors accumulated
     */
    public final List<String> validationErrors() {
        return validationErrors;
    }

    @Override
    public final String getMessage() {
        StringBuilder sb = new StringBuilder();
        sb.append("Validation Failed: ");
        int index = 0;
        for (String error : validationErrors) {
            sb.append(++index).append(": ").append(error).append(";");
        }
        return sb.toString();
    }
}
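Note: how the two classes above fit together; the request class is hypothetical, the API calls are the ones shown above:

// Illustration: a request that reports a validation error through Validatable.
class DemoRequest implements Validatable {      // hypothetical request type
    private final String name;
    DemoRequest(String name) { this.name = name; }

    @Override
    public Optional<ValidationException> validate() {
        if (name == null || name.isEmpty()) {
            ValidationException e = new ValidationException();
            e.addValidationError("name must not be empty");
            return Optional.of(e);              // getMessage(): "Validation Failed: 1: name must not be empty;"
        }
        return Optional.empty();
    }
}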
@@ -47,7 +47,7 @@ public final class WatcherClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public PutWatchResponse putWatch(PutWatchRequest request, RequestOptions options) throws IOException {
-       return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options,
+       return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::putWatch, options,
            PutWatchResponse::fromXContent, emptySet());
    }

@@ -61,7 +61,7 @@ public final class WatcherClient {
    */
    public void putWatchAsync(PutWatchRequest request, RequestOptions options,
                              ActionListener<PutWatchResponse> listener) {
-       restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options,
+       restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::putWatch, options,
            PutWatchResponse::fromXContent, listener, emptySet());
    }

@@ -75,7 +75,7 @@ public final class WatcherClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public DeleteWatchResponse deleteWatch(DeleteWatchRequest request, RequestOptions options) throws IOException {
-       return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options,
+       return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::deleteWatch, options,
            DeleteWatchResponse::fromXContent, singleton(404));
    }

@@ -88,7 +88,7 @@ public final class WatcherClient {
     * @param listener the listener to be notified upon request completion
     */
    public void deleteWatchAsync(DeleteWatchRequest request, RequestOptions options, ActionListener<DeleteWatchResponse> listener) {
-       restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options,
+       restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::deleteWatch, options,
            DeleteWatchResponse::fromXContent, listener, singleton(404));
    }
}
@@ -0,0 +1,62 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;

public class WatcherRequestConverters {

    static Request putWatch(PutWatchRequest putWatchRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("watcher")
            .addPathPartAsIs("watch")
            .addPathPart(putWatchRequest.getId())
            .build();

        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params(request).withVersion(putWatchRequest.getVersion());
        if (putWatchRequest.isActive() == false) {
            params.putParam("active", "false");
        }
        ContentType contentType = RequestConverters.createContentType(putWatchRequest.xContentType());
        BytesReference source = putWatchRequest.getSource();
        request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType));
        return request;
    }

    static Request deleteWatch(DeleteWatchRequest deleteWatchRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("watcher")
            .addPathPartAsIs("watch")
            .addPathPart(deleteWatchRequest.getId())
            .build();

        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        return request;
    }
}
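Note: a sketch of what `putWatch` produces, assuming `PutWatchRequest` carries an id, a `BytesReference` source with its `XContentType`, and an active flag; the watch id, the `watchSource` variable, and both constructors are assumptions:

// Illustration only: PUT /_xpack/watcher/watch/error_alert?active=false, body = watch source.
PutWatchRequest putWatchReq = new PutWatchRequest("error_alert", watchSource, XContentType.JSON); // assumed ctor
putWatchReq.setActive(false);                                       // assumed setter behind isActive()
Request putHttp = WatcherRequestConverters.putWatch(putWatchReq);

// Deletion needs no body: DELETE /_xpack/watcher/watch/error_alert
Request deleteHttp = WatcherRequestConverters.deleteWatch(new DeleteWatchRequest("error_alert")); // assumed ctor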
@@ -56,7 +56,7 @@ public final class XPackClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public XPackInfoResponse info(XPackInfoRequest request, RequestOptions options) throws IOException {
-       return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackInfo, options,
+       return restHighLevelClient.performRequestAndParseEntity(request, XPackRequestConverters::info, options,
            XPackInfoResponse::fromXContent, emptySet());
    }

@@ -70,7 +70,7 @@ public final class XPackClient {
    */
    public void infoAsync(XPackInfoRequest request, RequestOptions options,
                          ActionListener<XPackInfoResponse> listener) {
-       restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options,
+       restHighLevelClient.performRequestAsyncAndParseEntity(request, XPackRequestConverters::info, options,
            XPackInfoResponse::fromXContent, listener, emptySet());
    }

@@ -81,7 +81,7 @@ public final class XPackClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public XPackUsageResponse usage(XPackUsageRequest request, RequestOptions options) throws IOException {
-       return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xpackUsage, options,
+       return restHighLevelClient.performRequestAndParseEntity(request, XPackRequestConverters::usage, options,
            XPackUsageResponse::fromXContent, emptySet());
    }

@@ -91,7 +91,7 @@ public final class XPackClient {
     * @param listener the listener to be notified upon request completion
     */
    public void usageAsync(XPackUsageRequest request, RequestOptions options, ActionListener<XPackUsageResponse> listener) {
-       restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xpackUsage, options,
+       restHighLevelClient.performRequestAsyncAndParseEntity(request, XPackRequestConverters::usage, options,
            XPackUsageResponse::fromXContent, listener, emptySet());
    }
}
@@ -0,0 +1,51 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
import org.elasticsearch.protocol.xpack.XPackUsageRequest;

import java.util.EnumSet;
import java.util.Locale;
import java.util.stream.Collectors;

public class XPackRequestConverters {

    static Request info(XPackInfoRequest infoRequest) {
        Request request = new Request(HttpGet.METHOD_NAME, "/_xpack");
        if (false == infoRequest.isVerbose()) {
            request.addParameter("human", "false");
        }
        if (false == infoRequest.getCategories().equals(EnumSet.allOf(XPackInfoRequest.Category.class))) {
            request.addParameter("categories", infoRequest.getCategories().stream()
                .map(c -> c.toString().toLowerCase(Locale.ROOT))
                .collect(Collectors.joining(",")));
        }
        return request;
    }

    static Request usage(XPackUsageRequest usageRequest) {
        Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage");
        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(usageRequest.masterNodeTimeout());
        return request;
    }
}
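Note: a sketch of the `info` converter above, assuming the protocol request's `setVerbose`/`setCategories` setters; the categories parameter only appears when a strict subset is requested:

// Illustration only.
XPackInfoRequest infoRequest = new XPackInfoRequest();
infoRequest.setVerbose(false);   // adds human=false
infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.LICENSE, XPackInfoRequest.Category.FEATURES));
Request httpRequest = XPackRequestConverters.info(infoRequest);
// -> GET /_xpack?human=false&categories=license,features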
@@ -0,0 +1,62 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * Abstract class that provides a list of results and their count.
 */
public abstract class AbstractResultResponse<T extends ToXContent> extends ActionResponse implements ToXContentObject {

    public static final ParseField COUNT = new ParseField("count");

    private final ParseField resultsField;
    protected final List<T> results;
    protected final long count;

    AbstractResultResponse(ParseField resultsField, List<T> results, long count) {
        this.resultsField = Objects.requireNonNull(resultsField,
            "[results_field] must not be null");
        this.results = Collections.unmodifiableList(results);
        this.count = count;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(COUNT.getPreferredName(), count);
        builder.field(resultsField.getPreferredName(), results);
        builder.endObject();
        return builder;
    }

    public long count() {
        return count;
    }
}
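Note: a minimal sketch of a concrete subclass; `DemoResult` is a made-up `ToXContent` type:

// Hypothetical: a concrete response pairing a named result list with its count.
class DemoResultResponse extends AbstractResultResponse<DemoResult> {
    DemoResultResponse(List<DemoResult> results, long count) {
        super(new ParseField("results"), results, count);
    }
}
// Serializing via toXContent then yields: {"count": <count>, "results": [ ... ]}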
@@ -0,0 +1,195 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

/**
 * Request to close Machine Learning Jobs
 */
public class CloseJobRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField JOB_ID = new ParseField("job_id");
    public static final ParseField TIMEOUT = new ParseField("timeout");
    public static final ParseField FORCE = new ParseField("force");
    public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<CloseJobRequest, Void> PARSER = new ConstructingObjectParser<>(
        "close_job_request",
        true, a -> new CloseJobRequest((List<String>) a[0]));

    static {
        PARSER.declareField(ConstructingObjectParser.constructorArg(),
            p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
            JOB_ID, ObjectParser.ValueType.STRING_ARRAY);
        PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT);
        PARSER.declareBoolean(CloseJobRequest::setForce, FORCE);
        PARSER.declareBoolean(CloseJobRequest::setAllowNoJobs, ALLOW_NO_JOBS);
    }

    private static final String ALL_JOBS = "_all";

    private final List<String> jobIds;
    private TimeValue timeout;
    private Boolean force;
    private Boolean allowNoJobs;

    /**
     * Explicitly close all jobs
     *
     * @return a {@link CloseJobRequest} for all existing jobs
     */
    public static CloseJobRequest closeAllJobsRequest() {
        return new CloseJobRequest(ALL_JOBS);
    }

    CloseJobRequest(List<String> jobIds) {
        if (jobIds.isEmpty()) {
            throw new InvalidParameterException("jobIds must not be empty");
        }
        if (jobIds.stream().anyMatch(Objects::isNull)) {
            throw new NullPointerException("jobIds must not contain null values");
        }
        this.jobIds = new ArrayList<>(jobIds);
    }

    /**
     * Close the specified Jobs via their unique jobIds
     *
     * @param jobIds must be non-null and non-empty and each jobId must be non-null
     */
    public CloseJobRequest(String... jobIds) {
        this(Arrays.asList(jobIds));
    }

    /**
     * All the jobIds to be closed
     */
    public List<String> getJobIds() {
        return jobIds;
    }

    public TimeValue getTimeout() {
        return timeout;
    }

    /**
     * How long to wait for the close request to complete before timing out.
     *
     * @param timeout Default value: 30 minutes
     */
    public void setTimeout(TimeValue timeout) {
        this.timeout = timeout;
    }

    public Boolean isForce() {
        return force;
    }

    /**
     * Should the closing be forced.
     *
     * Use to close a failed job, or to forcefully close a job which has not responded to its initial close request.
     *
     * @param force When {@code true} forcefully close the job. Defaults to {@code false}
     */
    public void setForce(boolean force) {
        this.force = force;
    }

    public Boolean isAllowNoJobs() {
        return this.allowNoJobs;
    }

    /**
     * Whether to ignore if a wildcard expression matches no jobs.
     *
     * This includes the `_all` string or when no jobs have been specified.
     *
     * @param allowNoJobs When {@code true} ignore if wildcard or `_all` matches no jobs. Defaults to {@code true}
     */
    public void setAllowNoJobs(boolean allowNoJobs) {
        this.allowNoJobs = allowNoJobs;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobIds, timeout, force, allowNoJobs);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        CloseJobRequest that = (CloseJobRequest) other;
        return Objects.equals(jobIds, that.jobIds) &&
            Objects.equals(timeout, that.timeout) &&
            Objects.equals(force, that.force) &&
            Objects.equals(allowNoJobs, that.allowNoJobs);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(JOB_ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
        if (timeout != null) {
            builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep());
        }
        if (force != null) {
            builder.field(FORCE.getPreferredName(), force);
        }
        if (allowNoJobs != null) {
            builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }
}
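Note: usage of the class above; the job ids are made up:

// Illustration: closing two jobs with a one-minute timeout.
CloseJobRequest closeRequest = new CloseJobRequest("job-1", "job-2");
closeRequest.setTimeout(TimeValue.timeValueMinutes(1));
closeRequest.setAllowNoJobs(true);
// Serialized body: {"job_id":"job-1,job-2","timeout":"1m","allow_no_jobs":true}

CloseJobRequest closeAll = CloseJobRequest.closeAllJobsRequest(); // targets "_all"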
@@ -0,0 +1,89 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

/**
 * Response indicating if the Job(s) closed or not
 */
public class CloseJobResponse extends ActionResponse implements ToXContentObject {

    private static final ParseField CLOSED = new ParseField("closed");

    public static final ConstructingObjectParser<CloseJobResponse, Void> PARSER =
        new ConstructingObjectParser<>("close_job_response", true, (a) -> new CloseJobResponse((Boolean) a[0]));

    static {
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), CLOSED);
    }

    private boolean closed;

    public CloseJobResponse(boolean closed) {
        this.closed = closed;
    }

    public static CloseJobResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    /**
     * Has the job closed or not
     * @return boolean value indicating the job closed status
     */
    public boolean isClosed() {
        return closed;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        CloseJobResponse that = (CloseJobResponse) other;
        return isClosed() == that.isClosed();
    }

    @Override
    public int hashCode() {
        return Objects.hash(isClosed());
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(CLOSED.getPreferredName(), closed);
        builder.endObject();
        return builder;
    }
}
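Note: parsing the REST payload into the response above, using the standard x-content classes (`JsonXContent`, `NamedXContentRegistry`, `DeprecationHandler`); a sketch, not part of this change:

// Illustration: {"closed": true} -> CloseJobResponse
String json = "{\"closed\": true}";
try (XContentParser parser = JsonXContent.jsonXContent.createParser(
        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    CloseJobResponse response = CloseJobResponse.fromXContent(parser);
    assert response.isClosed();
}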
@@ -16,13 +16,16 @@
 * specific language governing permissions and limitations
 * under the License.
 */
-package org.elasticsearch.protocol.xpack.ml;
+package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;

import java.util.Objects;

+/**
+ * Request to delete a Machine Learning Job via its ID
+ */
public class DeleteJobRequest extends ActionRequest {

    private String jobId;

@@ -36,6 +39,10 @@ public class DeleteJobRequest extends ActionRequest {
        return jobId;
    }

+   /**
+    * The jobId of the job to delete
+    * @param jobId unique jobId to delete, must not be null
+    */
    public void setJobId(String jobId) {
        this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
    }

@@ -44,6 +51,12 @@ public class DeleteJobRequest extends ActionRequest {
        return force;
    }

+   /**
+    * Used to forcefully delete an opened job.
+    * This method is quicker than closing and deleting the job.
+    *
+    * @param force When {@code true} forcefully delete an opened job. Defaults to {@code false}
+    */
    public void setForce(boolean force) {
        this.force = force;
    }
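Note: usage of the setters above; the job id is made up, and the single-argument constructor is assumed from the unchanged part of the class:

// Illustration: force-deleting an opened job, skipping the close step.
DeleteJobRequest deleteJobRequest = new DeleteJobRequest("events-job"); // assumed ctor
deleteJobRequest.setForce(true);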
@@ -16,7 +16,7 @@
 * specific language governing permissions and limitations
 * under the License.
 */
-package org.elasticsearch.protocol.xpack.ml;
+package org.elasticsearch.client.ml;

import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.xcontent.XContentParser;

@@ -24,6 +24,9 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;

+/**
+ * Response acknowledging the Machine Learning Job request
+ */
public class DeleteJobResponse extends AcknowledgedResponse {

    public DeleteJobResponse(boolean acknowledged) {
@@ -0,0 +1,195 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

/**
 * Request object to flush a given Machine Learning job.
 */
public class FlushJobRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField CALC_INTERIM = new ParseField("calc_interim");
    public static final ParseField START = new ParseField("start");
    public static final ParseField END = new ParseField("end");
    public static final ParseField ADVANCE_TIME = new ParseField("advance_time");
    public static final ParseField SKIP_TIME = new ParseField("skip_time");

    public static final ConstructingObjectParser<FlushJobRequest, Void> PARSER =
        new ConstructingObjectParser<>("flush_job_request", (a) -> new FlushJobRequest((String) a[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareBoolean(FlushJobRequest::setCalcInterim, CALC_INTERIM);
        PARSER.declareString(FlushJobRequest::setStart, START);
        PARSER.declareString(FlushJobRequest::setEnd, END);
        PARSER.declareString(FlushJobRequest::setAdvanceTime, ADVANCE_TIME);
        PARSER.declareString(FlushJobRequest::setSkipTime, SKIP_TIME);
    }

    private final String jobId;
    private Boolean calcInterim;
    private String start;
    private String end;
    private String advanceTime;
    private String skipTime;

    /**
     * Create new Flush job request
     *
     * @param jobId The job ID of the job to flush
     */
    public FlushJobRequest(String jobId) {
        this.jobId = jobId;
    }

    public String getJobId() {
        return jobId;
    }

    public boolean getCalcInterim() {
        return calcInterim;
    }

    /**
     * When {@code true} calculates the interim results for the most recent bucket or all buckets within the latency period.
     *
     * @param calcInterim defaults to {@code false}.
     */
    public void setCalcInterim(boolean calcInterim) {
        this.calcInterim = calcInterim;
    }

    public String getStart() {
        return start;
    }

    /**
     * When used in conjunction with {@link FlushJobRequest#calcInterim},
     * specifies the start of the range of buckets on which to calculate interim results.
     *
     * @param start the beginning of the range of buckets; may be an epoch seconds, epoch millis or an ISO string
     */
    public void setStart(String start) {
        this.start = start;
    }

    public String getEnd() {
        return end;
    }

    /**
     * When used in conjunction with {@link FlushJobRequest#calcInterim}, specifies the end of the range
     * of buckets on which to calculate interim results
     *
     * @param end the end of the range of buckets; may be an epoch seconds, epoch millis or an ISO string
     */
    public void setEnd(String end) {
        this.end = end;
    }

    public String getAdvanceTime() {
        return advanceTime;
    }

    /**
     * Specifies to advance to a particular time value.
     * Results are generated and the model is updated for data from the specified time interval.
     *
     * @param advanceTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
     */
    public void setAdvanceTime(String advanceTime) {
        this.advanceTime = advanceTime;
    }

    public String getSkipTime() {
        return skipTime;
    }

    /**
     * Specifies to skip to a particular time value.
     * Results are not generated and the model is not updated for data from the specified time interval.
     *
     * @param skipTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
     */
    public void setSkipTime(String skipTime) {
        this.skipTime = skipTime;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, calcInterim, start, end, advanceTime, skipTime);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        FlushJobRequest other = (FlushJobRequest) obj;
        return Objects.equals(jobId, other.jobId) &&
            calcInterim == other.calcInterim &&
            Objects.equals(start, other.start) &&
            Objects.equals(end, other.end) &&
            Objects.equals(advanceTime, other.advanceTime) &&
            Objects.equals(skipTime, other.skipTime);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        if (calcInterim != null) {
            builder.field(CALC_INTERIM.getPreferredName(), calcInterim);
        }
        if (start != null) {
            builder.field(START.getPreferredName(), start);
        }
        if (end != null) {
            builder.field(END.getPreferredName(), end);
        }
        if (advanceTime != null) {
            builder.field(ADVANCE_TIME.getPreferredName(), advanceTime);
        }
        if (skipTime != null) {
            builder.field(SKIP_TIME.getPreferredName(), skipTime);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }
}
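Note: usage of the request above; job id and timestamps are made up:

// Illustration: flushing a job and calculating interim results for a bucket range.
FlushJobRequest flushRequest = new FlushJobRequest("metrics-job");
flushRequest.setCalcInterim(true);
flushRequest.setStart("1403481600");   // epoch seconds
flushRequest.setEnd("1403485200");
// Serialized body: {"job_id":"metrics-job","calc_interim":true,"start":"1403481600","end":"1403485200"}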
@@ -0,0 +1,112 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Date;
import java.util.Objects;

/**
 * Response object containing flush acknowledgement and additional data
 */
public class FlushJobResponse extends ActionResponse implements ToXContentObject {

    public static final ParseField FLUSHED = new ParseField("flushed");
    public static final ParseField LAST_FINALIZED_BUCKET_END = new ParseField("last_finalized_bucket_end");

    public static final ConstructingObjectParser<FlushJobResponse, Void> PARSER =
        new ConstructingObjectParser<>("flush_job_response",
            true,
            (a) -> {
                boolean flushed = (boolean) a[0];
                Date date = a[1] == null ? null : new Date((long) a[1]);
                return new FlushJobResponse(flushed, date);
            });

    static {
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), FLUSHED);
        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FINALIZED_BUCKET_END);
    }

    public static FlushJobResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    private final boolean flushed;
    private final Date lastFinalizedBucketEnd;

    public FlushJobResponse(boolean flushed, @Nullable Date lastFinalizedBucketEnd) {
        this.flushed = flushed;
        this.lastFinalizedBucketEnd = lastFinalizedBucketEnd;
    }

    /**
     * Was the job successfully flushed or not
     */
    public boolean isFlushed() {
        return flushed;
    }

    /**
     * Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed.
     */
    @Nullable
    public Date getLastFinalizedBucketEnd() {
        return lastFinalizedBucketEnd;
    }

    @Override
    public int hashCode() {
        return Objects.hash(flushed, lastFinalizedBucketEnd);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        FlushJobResponse that = (FlushJobResponse) other;
        return that.flushed == flushed && Objects.equals(lastFinalizedBucketEnd, that.lastFinalizedBucketEnd);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(FLUSHED.getPreferredName(), flushed);
        if (lastFinalizedBucketEnd != null) {
            builder.timeField(LAST_FINALIZED_BUCKET_END.getPreferredName(),
                LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", lastFinalizedBucketEnd.getTime());
        }
        builder.endObject();
        return builder;
    }
}
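Note: reading the flush acknowledgement; a sketch with a made-up timestamp (parsing from JSON follows the same `fromXContent` pattern shown for CloseJobResponse):

// Illustration: results up to the last finalized bucket end are final; later ones may still be interim.
FlushJobResponse flushResponse = new FlushJobResponse(true, new Date(1455234900000L));
if (flushResponse.isFlushed() && flushResponse.getLastFinalizedBucketEnd() != null) {
    long bucketEndMillis = flushResponse.getLastFinalizedBucketEnd().getTime();
}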
@@ -0,0 +1,140 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

/**
 * Pojo for forecasting an existing and open Machine Learning Job
 */
public class ForecastJobRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField DURATION = new ParseField("duration");
    public static final ParseField EXPIRES_IN = new ParseField("expires_in");

    public static final ConstructingObjectParser<ForecastJobRequest, Void> PARSER =
        new ConstructingObjectParser<>("forecast_job_request", (a) -> new ForecastJobRequest((String) a[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareString(
            (request, val) -> request.setDuration(TimeValue.parseTimeValue(val, DURATION.getPreferredName())), DURATION);
        PARSER.declareString(
            (request, val) -> request.setExpiresIn(TimeValue.parseTimeValue(val, EXPIRES_IN.getPreferredName())), EXPIRES_IN);
    }

    private final String jobId;
    private TimeValue duration;
    private TimeValue expiresIn;

    /**
     * A new forecast request
     *
     * @param jobId the non-null, existing, and opened jobId to forecast
     */
    public ForecastJobRequest(String jobId) {
        this.jobId = jobId;
    }

    public String getJobId() {
        return jobId;
    }

    public TimeValue getDuration() {
        return duration;
    }

    /**
     * Set the forecast duration
     *
     * A period of time that indicates how far into the future to forecast.
     * The default value is 1 day. The forecast starts at the last record that was processed.
     *
     * @param duration TimeValue for the duration of the forecast
     */
    public void setDuration(TimeValue duration) {
        this.duration = duration;
    }

    public TimeValue getExpiresIn() {
        return expiresIn;
    }

    /**
     * Set the forecast expiration
     *
     * The period of time that forecast results are retained.
     * After a forecast expires, the results are deleted. The default value is 14 days.
     * If set to a value of 0, the forecast is never automatically deleted.
     *
     * @param expiresIn TimeValue for the forecast expiration
     */
    public void setExpiresIn(TimeValue expiresIn) {
        this.expiresIn = expiresIn;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, duration, expiresIn);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ForecastJobRequest other = (ForecastJobRequest) obj;
        return Objects.equals(jobId, other.jobId)
            && Objects.equals(duration, other.duration)
            && Objects.equals(expiresIn, other.expiresIn);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        if (duration != null) {
            builder.field(DURATION.getPreferredName(), duration.getStringRep());
        }
        if (expiresIn != null) {
            builder.field(EXPIRES_IN.getPreferredName(), expiresIn.getStringRep());
        }
        builder.endObject();
        return builder;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }
}
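Note: usage of the request above; the job id and durations are made up:

// Illustration: a three-day forecast whose results are kept for one week.
ForecastJobRequest forecastRequest = new ForecastJobRequest("metrics-job");
forecastRequest.setDuration(TimeValue.timeValueHours(72));
forecastRequest.setExpiresIn(TimeValue.timeValueHours(168));
// Serialized body: {"job_id":"metrics-job","duration":"72h","expires_in":"168h"}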
@@ -0,0 +1,102 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

/**
 * Forecast response object
 */
public class ForecastJobResponse extends ActionResponse implements ToXContentObject {

    public static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
    public static final ParseField FORECAST_ID = new ParseField("forecast_id");

    public static final ConstructingObjectParser<ForecastJobResponse, Void> PARSER =
        new ConstructingObjectParser<>("forecast_job_response",
            true,
            (a) -> new ForecastJobResponse((Boolean) a[0], (String) a[1]));

    static {
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID);
    }

    public static ForecastJobResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    private final boolean acknowledged;
    private final String forecastId;

    public ForecastJobResponse(boolean acknowledged, String forecastId) {
        this.acknowledged = acknowledged;
        this.forecastId = forecastId;
    }

    /**
     * Forecast creation acknowledgement
     * @return {@code true} indicates success, {@code false} otherwise
     */
    public boolean isAcknowledged() {
        return acknowledged;
    }

    /**
     * The created forecast ID
     */
    public String getForecastId() {
        return forecastId;
    }

    @Override
    public int hashCode() {
        return Objects.hash(acknowledged, forecastId);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ForecastJobResponse other = (ForecastJobResponse) obj;
        return Objects.equals(acknowledged, other.acknowledged)
            && Objects.equals(forecastId, other.forecastId);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged);
        builder.field(FORECAST_ID.getPreferredName(), forecastId);
        builder.endObject();
        return builder;
    }
}
@@ -0,0 +1,267 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.results.Result;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

/**
 * A request to retrieve buckets of a given job
 */
public class GetBucketsRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField EXPAND = new ParseField("expand");
    public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
    public static final ParseField START = new ParseField("start");
    public static final ParseField END = new ParseField("end");
    public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
    public static final ParseField SORT = new ParseField("sort");
    public static final ParseField DESCENDING = new ParseField("desc");

    public static final ObjectParser<GetBucketsRequest, Void> PARSER = new ObjectParser<>("get_buckets_request", GetBucketsRequest::new);

    static {
        PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
        PARSER.declareString(GetBucketsRequest::setTimestamp, Result.TIMESTAMP);
        PARSER.declareBoolean(GetBucketsRequest::setExpand, EXPAND);
        PARSER.declareBoolean(GetBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM);
        PARSER.declareStringOrNull(GetBucketsRequest::setStart, START);
        PARSER.declareStringOrNull(GetBucketsRequest::setEnd, END);
        PARSER.declareObject(GetBucketsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE);
        PARSER.declareDouble(GetBucketsRequest::setAnomalyScore, ANOMALY_SCORE);
        PARSER.declareString(GetBucketsRequest::setSort, SORT);
        PARSER.declareBoolean(GetBucketsRequest::setDescending, DESCENDING);
    }

    private String jobId;
    private String timestamp;
    private Boolean expand;
    private Boolean excludeInterim;
    private String start;
    private String end;
    private PageParams pageParams;
    private Double anomalyScore;
    private String sort;
    private Boolean descending;

    private GetBucketsRequest() {}

    /**
     * Constructs a request to retrieve buckets of a given job
     * @param jobId id of the job to retrieve buckets of
     */
    public GetBucketsRequest(String jobId) {
        this.jobId = Objects.requireNonNull(jobId);
    }

    public String getJobId() {
        return jobId;
    }

    /**
     * Sets the timestamp of a specific bucket to be retrieved.
     * @param timestamp String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setTimestamp(String timestamp) {
        this.timestamp = timestamp;
    }

    public String getTimestamp() {
        return timestamp;
    }

    public Boolean isExpand() {
        return expand;
    }

    /**
     * Sets the value of "expand".
     * When {@code true}, buckets will be expanded to include their records.
     * @param expand value of "expand" to be set
     */
    public void setExpand(Boolean expand) {
        this.expand = expand;
    }

    public Boolean isExcludeInterim() {
        return excludeInterim;
    }

    /**
     * Sets the value of "exclude_interim".
     * When {@code true}, interim buckets will be filtered out.
     * @param excludeInterim value of "exclude_interim" to be set
     */
    public void setExcludeInterim(Boolean excludeInterim) {
        this.excludeInterim = excludeInterim;
    }

    public String getStart() {
        return start;
    }

    /**
     * Sets the value of "start" which is a timestamp.
     * Only buckets whose timestamp is on or after the "start" value will be returned.
     * @param start String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setStart(String start) {
        this.start = start;
    }

    public String getEnd() {
        return end;
    }

    /**
     * Sets the value of "end" which is a timestamp.
     * Only buckets whose timestamp is before the "end" value will be returned.
     * @param end String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setEnd(String end) {
        this.end = end;
    }

    public PageParams getPageParams() {
        return pageParams;
    }

    /**
     * Sets the paging parameters
     * @param pageParams the paging parameters
     */
    public void setPageParams(PageParams pageParams) {
        this.pageParams = pageParams;
    }

    public Double getAnomalyScore() {
        return anomalyScore;
    }

    /**
     * Sets the value of "anomaly_score".
     * Only buckets with an "anomaly_score" equal to or greater than this will be returned.
     * @param anomalyScore value of "anomaly_score".
     */
    public void setAnomalyScore(Double anomalyScore) {
        this.anomalyScore = anomalyScore;
    }

    public String getSort() {
        return sort;
    }

    /**
     * Sets the value of "sort".
     * Specifies the bucket field to sort on.
     * @param sort value of "sort".
     */
    public void setSort(String sort) {
        this.sort = sort;
    }

    public Boolean isDescending() {
        return descending;
    }

    /**
     * Sets the value of "desc".
     * Specifies the sorting order.
     * @param descending value of "desc"
     */
    public void setDescending(boolean descending) {
        this.descending = descending;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        if (timestamp != null) {
            builder.field(Result.TIMESTAMP.getPreferredName(), timestamp);
        }
        if (expand != null) {
            builder.field(EXPAND.getPreferredName(), expand);
        }
        if (excludeInterim != null) {
            builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim);
        }
        if (start != null) {
            builder.field(START.getPreferredName(), start);
        }
        if (end != null) {
            builder.field(END.getPreferredName(), end);
        }
        if (pageParams != null) {
            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
        }
        if (anomalyScore != null) {
            builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
        }
        if (sort != null) {
            builder.field(SORT.getPreferredName(), sort);
        }
        if (descending != null) {
            builder.field(DESCENDING.getPreferredName(), descending);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, timestamp, expand, excludeInterim, anomalyScore, pageParams, start, end, sort, descending);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GetBucketsRequest other = (GetBucketsRequest) obj;
        return Objects.equals(jobId, other.jobId) &&
            Objects.equals(timestamp, other.timestamp) &&
            Objects.equals(expand, other.expand) &&
            Objects.equals(excludeInterim, other.excludeInterim) &&
            Objects.equals(anomalyScore, other.anomalyScore) &&
            Objects.equals(pageParams, other.pageParams) &&
            Objects.equals(start, other.start) &&
            Objects.equals(end, other.end) &&
            Objects.equals(sort, other.sort) &&
            Objects.equals(descending, other.descending);
    }
}
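Taken together, the setters above compose queries like the following minimal caller-side sketch; the job id and values are invented for illustration:

    GetBucketsRequest request = new GetBucketsRequest("my-job");
    request.setStart("2018-08-01T00:00:00Z"); // inclusive lower bound
    request.setEnd("2018-08-02T00:00:00Z");   // exclusive upper bound
    request.setAnomalyScore(75.0);            // keep only high-scoring buckets
    request.setExpand(true);                  // pull in each bucket's records
    request.setSort("anomaly_score");
    request.setDescending(true);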
@@ -0,0 +1,78 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.ml.job.results.Bucket;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

/**
 * A response containing the requested buckets
 */
public class GetBucketsResponse extends AbstractResultResponse<Bucket> {

    public static final ParseField BUCKETS = new ParseField("buckets");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetBucketsResponse, Void> PARSER = new ConstructingObjectParser<>("get_buckets_response",
        true, a -> new GetBucketsResponse((List<Bucket>) a[0], (long) a[1]));

    static {
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Bucket.PARSER, BUCKETS);
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
    }

    public static GetBucketsResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    GetBucketsResponse(List<Bucket> buckets, long count) {
        super(BUCKETS, buckets, count);
    }

    /**
     * The retrieved buckets
     * @return the retrieved buckets
     */
    public List<Bucket> buckets() {
        return results;
    }

    @Override
    public int hashCode() {
        return Objects.hash(count, results);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GetBucketsResponse other = (GetBucketsResponse) obj;
        return count == other.count && Objects.equals(results, other.results);
    }
}
@@ -0,0 +1,227 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

/**
 * A request to retrieve influencers of a given job
 */
public class GetInfluencersRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
    public static final ParseField START = new ParseField("start");
    public static final ParseField END = new ParseField("end");
    public static final ParseField INFLUENCER_SCORE = new ParseField("influencer_score");
    public static final ParseField SORT = new ParseField("sort");
    public static final ParseField DESCENDING = new ParseField("desc");

    public static final ConstructingObjectParser<GetInfluencersRequest, Void> PARSER = new ConstructingObjectParser<>(
        "get_influencers_request", a -> new GetInfluencersRequest((String) a[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareBoolean(GetInfluencersRequest::setExcludeInterim, EXCLUDE_INTERIM);
        PARSER.declareStringOrNull(GetInfluencersRequest::setStart, START);
        PARSER.declareStringOrNull(GetInfluencersRequest::setEnd, END);
        PARSER.declareObject(GetInfluencersRequest::setPageParams, PageParams.PARSER, PageParams.PAGE);
        PARSER.declareDouble(GetInfluencersRequest::setInfluencerScore, INFLUENCER_SCORE);
        PARSER.declareString(GetInfluencersRequest::setSort, SORT);
        PARSER.declareBoolean(GetInfluencersRequest::setDescending, DESCENDING);
    }

    private final String jobId;
    private Boolean excludeInterim;
    private String start;
    private String end;
    private Double influencerScore;
    private PageParams pageParams;
    private String sort;
    private Boolean descending;

    /**
     * Constructs a request to retrieve influencers of a given job
     * @param jobId id of the job to retrieve influencers of
     */
    public GetInfluencersRequest(String jobId) {
        this.jobId = Objects.requireNonNull(jobId);
    }

    public String getJobId() {
        return jobId;
    }

    public Boolean isExcludeInterim() {
        return excludeInterim;
    }

    /**
     * Sets the value of "exclude_interim".
     * When {@code true}, interim influencers will be filtered out.
     * @param excludeInterim value of "exclude_interim" to be set
     */
    public void setExcludeInterim(Boolean excludeInterim) {
        this.excludeInterim = excludeInterim;
    }

    public String getStart() {
        return start;
    }

    /**
     * Sets the value of "start" which is a timestamp.
     * Only influencers whose timestamp is on or after the "start" value will be returned.
     * @param start String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setStart(String start) {
        this.start = start;
    }

    public String getEnd() {
        return end;
    }

    /**
     * Sets the value of "end" which is a timestamp.
     * Only influencers whose timestamp is before the "end" value will be returned.
     * @param end String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setEnd(String end) {
        this.end = end;
    }

    public PageParams getPageParams() {
        return pageParams;
    }

    /**
     * Sets the paging parameters
     * @param pageParams The paging parameters
     */
    public void setPageParams(PageParams pageParams) {
        this.pageParams = pageParams;
    }

    public Double getInfluencerScore() {
        return influencerScore;
    }

    /**
     * Sets the value of "influencer_score".
     * Only influencers with an "influencer_score" equal to or greater than this will be returned.
     * @param influencerScore value of "influencer_score".
     */
    public void setInfluencerScore(Double influencerScore) {
        this.influencerScore = influencerScore;
    }

    public String getSort() {
        return sort;
    }

    /**
     * Sets the value of "sort".
     * Specifies the influencer field to sort on.
     * @param sort value of "sort".
     */
    public void setSort(String sort) {
        this.sort = sort;
    }

    public Boolean isDescending() {
        return descending;
    }

    /**
     * Sets the value of "desc".
     * Specifies the sorting order.
     * @param descending value of "desc"
     */
    public void setDescending(Boolean descending) {
        this.descending = descending;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        if (excludeInterim != null) {
            builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim);
        }
        if (start != null) {
            builder.field(START.getPreferredName(), start);
        }
        if (end != null) {
            builder.field(END.getPreferredName(), end);
        }
        if (pageParams != null) {
            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
        }
        if (influencerScore != null) {
            builder.field(INFLUENCER_SCORE.getPreferredName(), influencerScore);
        }
        if (sort != null) {
            builder.field(SORT.getPreferredName(), sort);
        }
        if (descending != null) {
            builder.field(DESCENDING.getPreferredName(), descending);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, excludeInterim, influencerScore, pageParams, start, end, sort, descending);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GetInfluencersRequest other = (GetInfluencersRequest) obj;
        return Objects.equals(jobId, other.jobId) &&
            Objects.equals(excludeInterim, other.excludeInterim) &&
            Objects.equals(influencerScore, other.influencerScore) &&
            Objects.equals(pageParams, other.pageParams) &&
            Objects.equals(start, other.start) &&
            Objects.equals(end, other.end) &&
            Objects.equals(sort, other.sort) &&
            Objects.equals(descending, other.descending);
    }
}
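A short sketch of paging through influencers; note that the PageParams(from, size) constructor is an assumption about the companion util class, which this diff only references:

    GetInfluencersRequest request = new GetInfluencersRequest("my-job");
    request.setInfluencerScore(50.0);             // only influencers scoring at least 50
    request.setExcludeInterim(true);              // drop interim results
    request.setPageParams(new PageParams(0, 25)); // first page of 25 (assumed constructor)
    request.setSort("influencer_score");
    request.setDescending(true);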
@@ -0,0 +1,78 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.ml.job.results.Influencer;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

/**
 * A response containing the requested influencers
 */
public class GetInfluencersResponse extends AbstractResultResponse<Influencer> {

    public static final ParseField INFLUENCERS = new ParseField("influencers");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetInfluencersResponse, Void> PARSER = new ConstructingObjectParser<>(
        "get_influencers_response", true, a -> new GetInfluencersResponse((List<Influencer>) a[0], (long) a[1]));

    static {
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Influencer.PARSER, INFLUENCERS);
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
    }

    public static GetInfluencersResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    GetInfluencersResponse(List<Influencer> influencers, long count) {
        super(INFLUENCERS, influencers, count);
    }

    /**
     * The retrieved influencers
     * @return the retrieved influencers
     */
    public List<Influencer> influencers() {
        return results;
    }

    @Override
    public int hashCode() {
        return Objects.hash(count, results);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GetInfluencersResponse other = (GetInfluencersResponse) obj;
        return count == other.count && Objects.equals(results, other.results);
    }
}
@@ -0,0 +1,144 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

/**
 * Request object to get {@link Job} objects with the matching `jobId`s or
 * `groupName`s.
 *
 * `_all` explicitly gets all the jobs in the cluster
 * An empty request (no `jobId`s) implicitly gets all the jobs in the cluster
 */
public class GetJobRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField JOB_IDS = new ParseField("job_ids");
    public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");

    private static final String ALL_JOBS = "_all";
    private final List<String> jobIds;
    private Boolean allowNoJobs;

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetJobRequest, Void> PARSER = new ConstructingObjectParser<>(
        "get_job_request",
        true, a -> new GetJobRequest(a[0] == null ? new ArrayList<>() : (List<String>) a[0]));

    static {
        PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), JOB_IDS);
        PARSER.declareBoolean(GetJobRequest::setAllowNoJobs, ALLOW_NO_JOBS);
    }

    /**
     * Helper method to create a query that will get ALL jobs
     * @return new {@link GetJobRequest} object searching for the jobId "_all"
     */
    public static GetJobRequest getAllJobsRequest() {
        return new GetJobRequest(ALL_JOBS);
    }

    /**
     * Get the specified {@link Job} configurations via their unique jobIds
     * @param jobIds must not contain any null values
     */
    public GetJobRequest(String... jobIds) {
        this(Arrays.asList(jobIds));
    }

    GetJobRequest(List<String> jobIds) {
        if (jobIds.stream().anyMatch(Objects::isNull)) {
            throw new NullPointerException("jobIds must not contain null values");
        }
        this.jobIds = new ArrayList<>(jobIds);
    }

    /**
     * All the jobIds for which to get configuration information
     */
    public List<String> getJobIds() {
        return jobIds;
    }

    /**
     * Whether to ignore if a wildcard expression matches no jobs.
     *
     * @param allowNoJobs If this is {@code false}, then an error is returned when a wildcard (or `_all`) does not match any jobs
     */
    public void setAllowNoJobs(boolean allowNoJobs) {
        this.allowNoJobs = allowNoJobs;
    }

    public Boolean isAllowNoJobs() {
        return allowNoJobs;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobIds, allowNoJobs);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || other.getClass() != getClass()) {
            return false;
        }

        GetJobRequest that = (GetJobRequest) other;
        return Objects.equals(jobIds, that.jobIds) &&
            Objects.equals(allowNoJobs, that.allowNoJobs);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();

        if (jobIds.isEmpty() == false) {
            builder.field(JOB_IDS.getPreferredName(), jobIds);
        }

        if (allowNoJobs != null) {
            builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
        }

        builder.endObject();
        return builder;
    }
}
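The two "get everything" paths called out in the class Javadoc, plus a targeted lookup, as a usage sketch:

    GetJobRequest all = GetJobRequest.getAllJobsRequest(); // explicit "_all"
    GetJobRequest implicitAll = new GetJobRequest();       // empty id list, same effect

    GetJobRequest some = new GetJobRequest("job-1", "my-group-*");
    some.setAllowNoJobs(true); // don't error if the wildcard matches nothing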
@@ -0,0 +1,89 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * Contains a {@link List} of the found {@link Job} objects and the total count found
 */
public class GetJobResponse extends AbstractResultResponse<Job> {

    public static final ParseField RESULTS_FIELD = new ParseField("jobs");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetJobResponse, Void> PARSER =
        new ConstructingObjectParser<>("jobs_response", true,
            a -> new GetJobResponse((List<Job.Builder>) a[0], (long) a[1]));

    static {
        PARSER.declareObjectArray(constructorArg(), Job.PARSER, RESULTS_FIELD);
        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
    }

    GetJobResponse(List<Job.Builder> jobBuilders, long count) {
        super(RESULTS_FIELD, jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()), count);
    }

    /**
     * The collection of {@link Job} objects found in the query
     */
    public List<Job> jobs() {
        return results;
    }

    public static GetJobResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    @Override
    public int hashCode() {
        return Objects.hash(results, count);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        GetJobResponse other = (GetJobResponse) obj;
        return Objects.equals(results, other.results) && count == other.count;
    }

    @Override
    public final String toString() {
        return Strings.toString(this);
    }
}
@@ -0,0 +1,146 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

/**
 * Request object to get {@link org.elasticsearch.client.ml.job.stats.JobStats} by their respective jobIds
 *
 * `_all` explicitly gets all the jobs' statistics in the cluster
 * An empty request (no `jobId`s) implicitly gets all the jobs' statistics in the cluster
 */
public class GetJobStatsRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetJobStatsRequest, Void> PARSER = new ConstructingObjectParser<>(
        "get_jobs_stats_request", a -> new GetJobStatsRequest((List<String>) a[0]));

    static {
        PARSER.declareField(ConstructingObjectParser.constructorArg(),
            p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
            Job.ID, ObjectParser.ValueType.STRING_ARRAY);
        PARSER.declareBoolean(GetJobStatsRequest::setAllowNoJobs, ALLOW_NO_JOBS);
    }

    private static final String ALL_JOBS = "_all";

    private final List<String> jobIds;
    private Boolean allowNoJobs;

    /**
     * Explicitly gets all jobs' statistics
     *
     * @return a {@link GetJobStatsRequest} for all existing jobs
     */
    public static GetJobStatsRequest getAllJobStatsRequest() {
        return new GetJobStatsRequest(ALL_JOBS);
    }

    GetJobStatsRequest(List<String> jobIds) {
        if (jobIds.stream().anyMatch(Objects::isNull)) {
            throw new NullPointerException("jobIds must not contain null values");
        }
        this.jobIds = new ArrayList<>(jobIds);
    }

    /**
     * Get the specified jobs' statistics via their unique jobIds
     *
     * @param jobIds must be non-null and each jobId must be non-null
     */
    public GetJobStatsRequest(String... jobIds) {
        this(Arrays.asList(jobIds));
    }

    /**
     * All the jobIds for which to get statistics
     */
    public List<String> getJobIds() {
        return jobIds;
    }

    public Boolean isAllowNoJobs() {
        return this.allowNoJobs;
    }

    /**
     * Whether to ignore if a wildcard expression matches no jobs.
     *
     * This includes the `_all` string or when no jobs have been specified
     *
     * @param allowNoJobs When {@code true} ignore if wildcard or `_all` matches no jobs. Defaults to {@code true}
     */
    public void setAllowNoJobs(boolean allowNoJobs) {
        this.allowNoJobs = allowNoJobs;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobIds, allowNoJobs);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        GetJobStatsRequest that = (GetJobStatsRequest) other;
        return Objects.equals(jobIds, that.jobIds) &&
            Objects.equals(allowNoJobs, that.allowNoJobs);
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
        if (allowNoJobs != null) {
            builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
        }
        builder.endObject();
        return builder;
    }
}
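One difference worth noting against GetJobRequest: here the ids are serialized as a single comma-delimited job_id string (and split back apart by the PARSER), rather than as a job_ids array. A sketch, with the rendered JSON shown as a comment under the assumption that Job.ID's preferred name is "job_id":

    GetJobStatsRequest request = new GetJobStatsRequest("job-1", "job-2");
    request.setAllowNoJobs(true);
    // toXContent output (assumed field name): {"job_id":"job-1,job-2","allow_no_jobs":true}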
@@ -0,0 +1,88 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * Contains a {@link List} of the found {@link JobStats} objects and the total count found
 */
public class GetJobStatsResponse extends AbstractResultResponse<JobStats> {

    public static final ParseField RESULTS_FIELD = new ParseField("jobs");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetJobStatsResponse, Void> PARSER =
        new ConstructingObjectParser<>("jobs_stats_response", true,
            a -> new GetJobStatsResponse((List<JobStats>) a[0], (long) a[1]));

    static {
        PARSER.declareObjectArray(constructorArg(), JobStats.PARSER, RESULTS_FIELD);
        PARSER.declareLong(constructorArg(), COUNT);
    }

    GetJobStatsResponse(List<JobStats> jobStats, long count) {
        super(RESULTS_FIELD, jobStats, count);
    }

    /**
     * The collection of {@link JobStats} objects found in the query
     */
    public List<JobStats> jobStats() {
        return results;
    }

    public static GetJobStatsResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    @Override
    public int hashCode() {
        return Objects.hash(results, count);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        GetJobStatsResponse other = (GetJobStatsResponse) obj;
        return Objects.equals(results, other.results) && count == other.count;
    }

    @Override
    public final String toString() {
        return Strings.toString(this);
    }
}
@@ -0,0 +1,266 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * A request to retrieve overall buckets of a set of jobs
 */
public class GetOverallBucketsRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField TOP_N = new ParseField("top_n");
    public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
    public static final ParseField OVERALL_SCORE = new ParseField("overall_score");
    public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
    public static final ParseField START = new ParseField("start");
    public static final ParseField END = new ParseField("end");
    public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");

    private static final String ALL_JOBS = "_all";

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetOverallBucketsRequest, Void> PARSER = new ConstructingObjectParser<>(
        "get_overall_buckets_request", a -> new GetOverallBucketsRequest((String) a[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareInt(GetOverallBucketsRequest::setTopN, TOP_N);
        PARSER.declareString(GetOverallBucketsRequest::setBucketSpan, BUCKET_SPAN);
        PARSER.declareBoolean(GetOverallBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM);
        PARSER.declareDouble(GetOverallBucketsRequest::setOverallScore, OVERALL_SCORE);
        PARSER.declareStringOrNull(GetOverallBucketsRequest::setStart, START);
        PARSER.declareStringOrNull(GetOverallBucketsRequest::setEnd, END);
        PARSER.declareBoolean(GetOverallBucketsRequest::setAllowNoJobs, ALLOW_NO_JOBS);
    }

    private final List<String> jobIds;
    private Integer topN;
    private TimeValue bucketSpan;
    private Boolean excludeInterim;
    private Double overallScore;
    private String start;
    private String end;
    private Boolean allowNoJobs;

    private GetOverallBucketsRequest(String jobId) {
        this(Strings.tokenizeToStringArray(jobId, ","));
    }

    /**
     * Constructs a request to retrieve overall buckets for a set of jobs
     * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression.
     */
    public GetOverallBucketsRequest(String... jobIds) {
        this(Arrays.asList(jobIds));
    }

    /**
     * Constructs a request to retrieve overall buckets for a set of jobs
     * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression.
     */
    public GetOverallBucketsRequest(List<String> jobIds) {
        if (jobIds.stream().anyMatch(Objects::isNull)) {
            throw new NullPointerException("jobIds must not contain null values");
        }
        if (jobIds.isEmpty()) {
            this.jobIds = Collections.singletonList(ALL_JOBS);
        } else {
            this.jobIds = Collections.unmodifiableList(jobIds);
        }
    }

    public List<String> getJobIds() {
        return jobIds;
    }

    public Integer getTopN() {
        return topN;
    }

    /**
     * Sets the value of `top_n`.
     * @param topN The number of top job bucket scores to be used in the overall_score calculation. Defaults to 1.
     */
    public void setTopN(Integer topN) {
        this.topN = topN;
    }

    public TimeValue getBucketSpan() {
        return bucketSpan;
    }

    /**
     * Sets the value of `bucket_span`.
     * @param bucketSpan The span of the overall buckets. Must be greater than or equal to the largest job's bucket_span.
     *                   Defaults to the largest job's bucket_span.
     */
    public void setBucketSpan(TimeValue bucketSpan) {
        this.bucketSpan = bucketSpan;
    }

    private void setBucketSpan(String bucketSpan) {
        this.bucketSpan = TimeValue.parseTimeValue(bucketSpan, BUCKET_SPAN.getPreferredName());
    }

    public Boolean isExcludeInterim() {
        return excludeInterim;
    }

    /**
     * Sets the value of "exclude_interim".
     * When {@code true}, interim overall buckets will be filtered out.
     * Overall buckets are interim if any of the job buckets within the overall bucket interval are interim.
     * @param excludeInterim value of "exclude_interim" to be set
     */
    public void setExcludeInterim(Boolean excludeInterim) {
        this.excludeInterim = excludeInterim;
    }

    public String getStart() {
        return start;
    }

    /**
     * Sets the value of "start" which is a timestamp.
     * Only overall buckets whose timestamp is on or after the "start" value will be returned.
     * @param start String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setStart(String start) {
        this.start = start;
    }

    public String getEnd() {
        return end;
    }

    /**
     * Sets the value of "end" which is a timestamp.
     * Only overall buckets whose timestamp is before the "end" value will be returned.
     * @param end String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setEnd(String end) {
        this.end = end;
    }

    public Double getOverallScore() {
        return overallScore;
    }

    /**
     * Sets the value of "overall_score".
     * Only buckets with an "overall_score" equal to or greater than this will be returned.
     * @param overallScore value of "overall_score".
     */
    public void setOverallScore(double overallScore) {
        this.overallScore = overallScore;
    }

    /**
     * See {@link GetJobRequest#isAllowNoJobs()}
     * @param allowNoJobs value of "allow_no_jobs".
     */
    public void setAllowNoJobs(boolean allowNoJobs) {
        this.allowNoJobs = allowNoJobs;
    }

    /**
     * Whether to ignore if a wildcard expression matches no jobs.
     *
     * If this is `false`, then an error is returned when a wildcard (or `_all`) does not match any jobs
     */
    public Boolean isAllowNoJobs() {
        return allowNoJobs;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();

        if (jobIds.isEmpty() == false) {
            builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
        }
        if (topN != null) {
            builder.field(TOP_N.getPreferredName(), topN);
        }
        if (bucketSpan != null) {
            builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep());
        }
        if (excludeInterim != null) {
            builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim);
        }
        if (start != null) {
            builder.field(START.getPreferredName(), start);
        }
        if (end != null) {
            builder.field(END.getPreferredName(), end);
        }
        if (overallScore != null) {
            builder.field(OVERALL_SCORE.getPreferredName(), overallScore);
        }
        if (allowNoJobs != null) {
            builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobIds, topN, bucketSpan, excludeInterim, overallScore, start, end, allowNoJobs);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GetOverallBucketsRequest other = (GetOverallBucketsRequest) obj;
        return Objects.equals(jobIds, other.jobIds) &&
            Objects.equals(topN, other.topN) &&
            Objects.equals(bucketSpan, other.bucketSpan) &&
            Objects.equals(excludeInterim, other.excludeInterim) &&
            Objects.equals(overallScore, other.overallScore) &&
            Objects.equals(start, other.start) &&
            Objects.equals(end, other.end) &&
            Objects.equals(allowNoJobs, other.allowNoJobs);
    }
}
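A sketch combining the multi-job constructor with the span and scoring knobs; the values are illustrative only:

    GetOverallBucketsRequest request = new GetOverallBucketsRequest("job-1", "job-2");
    request.setTopN(2);                                 // use the top 2 job bucket scores
    request.setBucketSpan(TimeValue.timeValueHours(1)); // >= the largest job's bucket_span
    request.setOverallScore(60.0);
    request.setExcludeInterim(true);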
@@ -0,0 +1,78 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.ml.job.results.OverallBucket;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

/**
 * A response containing the requested overall buckets
 */
public class GetOverallBucketsResponse extends AbstractResultResponse<OverallBucket> {

    public static final ParseField OVERALL_BUCKETS = new ParseField("overall_buckets");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetOverallBucketsResponse, Void> PARSER = new ConstructingObjectParser<>(
        "get_overall_buckets_response", true, a -> new GetOverallBucketsResponse((List<OverallBucket>) a[0], (long) a[1]));

    static {
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), OverallBucket.PARSER, OVERALL_BUCKETS);
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
    }

    public static GetOverallBucketsResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    GetOverallBucketsResponse(List<OverallBucket> overallBuckets, long count) {
        super(OVERALL_BUCKETS, overallBuckets, count);
    }

    /**
     * The retrieved overall buckets
     * @return the retrieved overall buckets
     */
    public List<OverallBucket> overallBuckets() {
        return results;
    }

    @Override
    public int hashCode() {
        return Objects.hash(count, results);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GetOverallBucketsResponse other = (GetOverallBucketsResponse) obj;
        return count == other.count && Objects.equals(results, other.results);
    }
}
@@ -0,0 +1,222 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

/**
 * A request to retrieve records of a given job
 */
public class GetRecordsRequest implements ToXContentObject, Validatable {

    public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
    public static final ParseField START = new ParseField("start");
    public static final ParseField END = new ParseField("end");
    public static final ParseField RECORD_SCORE = new ParseField("record_score");
    public static final ParseField SORT = new ParseField("sort");
    public static final ParseField DESCENDING = new ParseField("desc");

    public static final ObjectParser<GetRecordsRequest, Void> PARSER = new ObjectParser<>("get_records_request", GetRecordsRequest::new);

    static {
        PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
        PARSER.declareBoolean(GetRecordsRequest::setExcludeInterim, EXCLUDE_INTERIM);
        PARSER.declareStringOrNull(GetRecordsRequest::setStart, START);
        PARSER.declareStringOrNull(GetRecordsRequest::setEnd, END);
        PARSER.declareObject(GetRecordsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE);
        PARSER.declareDouble(GetRecordsRequest::setRecordScore, RECORD_SCORE);
        PARSER.declareString(GetRecordsRequest::setSort, SORT);
        PARSER.declareBoolean(GetRecordsRequest::setDescending, DESCENDING);
    }

    private String jobId;
    private Boolean excludeInterim;
    private String start;
    private String end;
    private PageParams pageParams;
    private Double recordScore;
    private String sort;
    private Boolean descending;

    private GetRecordsRequest() {}

    /**
     * Constructs a request to retrieve records of a given job
     * @param jobId id of the job to retrieve records of
     */
    public GetRecordsRequest(String jobId) {
        this.jobId = Objects.requireNonNull(jobId);
    }

    public String getJobId() {
        return jobId;
    }

    public Boolean isExcludeInterim() {
        return excludeInterim;
    }

    /**
     * Sets the value of "exclude_interim".
     * When {@code true}, interim records will be filtered out.
     * @param excludeInterim value of "exclude_interim" to be set
     */
    public void setExcludeInterim(Boolean excludeInterim) {
        this.excludeInterim = excludeInterim;
    }

    public String getStart() {
        return start;
    }

    /**
     * Sets the value of "start" which is a timestamp.
     * Only records whose timestamp is on or after the "start" value will be returned.
     * @param start String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setStart(String start) {
        this.start = start;
    }

    public String getEnd() {
        return end;
    }

    /**
     * Sets the value of "end" which is a timestamp.
     * Only records whose timestamp is before the "end" value will be returned.
     * @param end String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setEnd(String end) {
        this.end = end;
    }

    public PageParams getPageParams() {
        return pageParams;
    }

    /**
     * Sets the paging parameters
     * @param pageParams The paging parameters
     */
    public void setPageParams(PageParams pageParams) {
        this.pageParams = pageParams;
    }

    public Double getRecordScore() {
        return recordScore;
    }

    /**
     * Sets the value of "record_score".
     * Only records with a "record_score" equal to or greater than this will be returned.
     * @param recordScore value of "record_score".
     */
    public void setRecordScore(Double recordScore) {
        this.recordScore = recordScore;
    }

    public String getSort() {
        return sort;
    }

    /**
     * Sets the value of "sort".
     * Specifies the record field to sort on.
     * @param sort value of "sort".
     */
    public void setSort(String sort) {
        this.sort = sort;
    }

    public Boolean isDescending() {
        return descending;
    }

    /**
     * Sets the value of "desc".
     * Specifies the sorting order.
     * @param descending value of "desc"
     */
    public void setDescending(Boolean descending) {
        this.descending = descending;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        if (excludeInterim != null) {
            builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim);
        }
        if (start != null) {
            builder.field(START.getPreferredName(), start);
        }
        if (end != null) {
            builder.field(END.getPreferredName(), end);
        }
        if (pageParams != null) {
            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
        }
        if (recordScore != null) {
            builder.field(RECORD_SCORE.getPreferredName(), recordScore);
        }
        if (sort != null) {
            builder.field(SORT.getPreferredName(), sort);
        }
        if (descending != null) {
            builder.field(DESCENDING.getPreferredName(), descending);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, excludeInterim, recordScore, pageParams, start, end, sort, descending);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GetRecordsRequest other = (GetRecordsRequest) obj;
        return Objects.equals(jobId, other.jobId) &&
            Objects.equals(excludeInterim, other.excludeInterim) &&
            Objects.equals(recordScore, other.recordScore) &&
            Objects.equals(pageParams, other.pageParams) &&
            Objects.equals(start, other.start) &&
            Objects.equals(end, other.end) &&
            Objects.equals(sort, other.sort) &&
|
||||
Objects.equals(descending, other.descending);
|
||||
}
|
||||
}
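For illustration, a minimal usage sketch of this class; the job id, timestamps and threshold below are made-up values, and the PageParams(from, size) constructor is assumed from its use in the parser above:

    // Sketch only; "my-job" and all parameter values are illustrative.
    GetRecordsRequest request = new GetRecordsRequest("my-job");
    request.setExcludeInterim(true);                 // filter out interim records
    request.setStart("2018-08-01T00:00:00Z");        // ISO-8601; epoch seconds/millis also accepted
    request.setEnd("1535760000000");                 // epoch millis
    request.setRecordScore(75.0);                    // only records scoring >= 75
    request.setSort("record_score");                 // record field to sort on
    request.setDescending(true);                     // highest scores first
    request.setPageParams(new PageParams(0, 100));   // assumed PageParams(from, size)

Serialized through toXContent, such a request produces a body along the lines of {"job_id":"my-job","exclude_interim":true,"start":"2018-08-01T00:00:00Z",...}, with one field per non-null setter above.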
@ -0,0 +1,78 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.client.ml.job.results.AnomalyRecord;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

/**
 * A response containing the requested records
 */
public class GetRecordsResponse extends AbstractResultResponse<AnomalyRecord> {

    public static final ParseField RECORDS = new ParseField("records");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetRecordsResponse, Void> PARSER = new ConstructingObjectParser<>("get_records_response",
        true, a -> new GetRecordsResponse((List<AnomalyRecord>) a[0], (long) a[1]));

    static {
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARSER, RECORDS);
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
    }

    public static GetRecordsResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    GetRecordsResponse(List<AnomalyRecord> records, long count) {
        super(RECORDS, records, count);
    }

    /**
     * The retrieved records
     * @return the retrieved records
     */
    public List<AnomalyRecord> records() {
        return results;
    }

    @Override
    public int hashCode() {
        return Objects.hash(count, results);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GetRecordsResponse other = (GetRecordsResponse) obj;
        return count == other.count && Objects.equals(results, other.results);
    }
}
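And a rough sketch of consuming the response; the parser setup is elided, and count() plus the AnomalyRecord getters are assumptions based on AbstractResultResponse and AnomalyRecord's field names rather than confirmed by this diff:

    // 'parser' is assumed to be an XContentParser positioned over the JSON response body.
    GetRecordsResponse response = GetRecordsResponse.fromXContent(parser);
    long total = response.count();                  // total matching records (assumed accessor)
    for (AnomalyRecord record : response.records()) {
        // getJobId()/getRecordScore() are assumed AnomalyRecord accessors
        System.out.println(record.getJobId() + " -> " + record.getRecordScore());
    }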