Merge branch 'master' into index-lifecycle
This commit is contained in:
commit
e9cd0556c1
|
@ -7,3 +7,4 @@
|
|||
|
||||
ES_BUILD_JAVA:
|
||||
- java10
|
||||
- java11
|
||||
|
|
|
@ -8,3 +8,4 @@
|
|||
ES_RUNTIME_JAVA:
|
||||
- java8
|
||||
- java10
|
||||
- java11
|
||||
|
|
|
@ -30,7 +30,7 @@ buildscript {
|
|||
|
||||
apply plugin: 'elasticsearch.build'
|
||||
|
||||
// order of this seciont matters, see: https://github.com/johnrengelman/shadow/issues/336
|
||||
// order of this section matters, see: https://github.com/johnrengelman/shadow/issues/336
|
||||
apply plugin: 'application' // have the shadow plugin provide the runShadow task
|
||||
mainClassName = 'org.openjdk.jmh.Main'
|
||||
apply plugin: 'com.github.johnrengelman.shadow' // build an uberjar with all benchmarks
|
||||
|
|
62
build.gradle
62
build.gradle
|
@ -125,7 +125,10 @@ Map<String, String> buildMetadataMap = buildMetadataValue.tokenize(';').collectE
|
|||
allprojects {
|
||||
project.ext {
|
||||
// for ide hacks...
|
||||
isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse')
|
||||
isEclipse = System.getProperty("eclipse.launcher") != null || // Detects gradle launched from Eclipse's IDE
|
||||
System.getProperty("eclipse.application") != null || // Detects gradle launched from the Eclipse compiler server
|
||||
gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff
|
||||
gradle.startParameter.taskNames.contains('cleanEclipse')
|
||||
isIdea = System.getProperty("idea.active") != null || gradle.startParameter.taskNames.contains('idea') || gradle.startParameter.taskNames.contains('cleanIdea')
|
||||
|
||||
// for BWC testing
|
||||
|
@ -171,7 +174,7 @@ task verifyVersions {
|
|||
* after the backport of the backcompat code is complete.
|
||||
*/
|
||||
final boolean bwc_tests_enabled = true
|
||||
final String bwc_tests_disabled_issue = "" /* place a PR link here when commiting bwc changes */
|
||||
final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
|
||||
if (bwc_tests_enabled == false) {
|
||||
if (bwc_tests_disabled_issue.isEmpty()) {
|
||||
throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")
|
||||
|
@ -326,6 +329,9 @@ gradle.projectsEvaluated {
|
|||
// :test:framework:test cannot run before and after :server:test
|
||||
return
|
||||
}
|
||||
if (tasks.findByPath('test') != null && tasks.findByPath('integTest') != null) {
|
||||
integTest.mustRunAfter test
|
||||
}
|
||||
configurations.all { Configuration configuration ->
|
||||
/*
|
||||
* The featureAwarePlugin configuration has a dependency on x-pack:plugin:core and x-pack:plugin:core has a dependency on the
|
||||
|
@ -442,12 +448,19 @@ allprojects {
|
|||
}
|
||||
|
||||
File licenseHeaderFile;
|
||||
if (eclipse.project.name.startsWith(':x-pack')) {
|
||||
String prefix = ':x-pack';
|
||||
|
||||
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
|
||||
prefix = prefix.replace(':', '_')
|
||||
}
|
||||
if (eclipse.project.name.startsWith(prefix)) {
|
||||
licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/elastic-license-header.txt')
|
||||
} else {
|
||||
licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/oss-license-header.txt')
|
||||
}
|
||||
String licenseHeader = licenseHeaderFile.getText('UTF-8').replace('\n', '\\\\n')
|
||||
|
||||
String lineSeparator = Os.isFamily(Os.FAMILY_WINDOWS) ? '\\\\r\\\\n' : '\\\\n'
|
||||
String licenseHeader = licenseHeaderFile.getText('UTF-8').replace(System.lineSeparator(), lineSeparator)
|
||||
task copyEclipseSettings(type: Copy) {
|
||||
// TODO: "package this up" for external builds
|
||||
from new File(project.rootDir, 'buildSrc/src/main/resources/eclipse.settings')
|
||||
|
@ -483,25 +496,17 @@ task run(type: Run) {
|
|||
impliesSubProjects = true
|
||||
}
|
||||
|
||||
task wrapper(type: Wrapper)
|
||||
|
||||
gradle.projectsEvaluated {
|
||||
|
||||
allprojects {
|
||||
tasks.withType(Wrapper) { Wrapper wrapper ->
|
||||
wrapper.distributionType = DistributionType.ALL
|
||||
|
||||
wrapper.doLast {
|
||||
wrapper {
|
||||
distributionType = DistributionType.ALL
|
||||
doLast {
|
||||
final DistributionLocator locator = new DistributionLocator()
|
||||
final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
|
||||
final URI distributionUri = locator.getDistributionFor(version, wrapper.distributionType.name().toLowerCase(Locale.ENGLISH))
|
||||
final URI sha256Uri = new URI(distributionUri.toString() + ".sha256")
|
||||
final String sha256Sum = new String(sha256Uri.toURL().bytes)
|
||||
wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
|
||||
}
|
||||
println "Added checksum to wrapper properties"
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static void assertLinesInFile(final Path path, final List<String> expectedLines) {
|
||||
|
@ -575,3 +580,28 @@ gradle.projectsEvaluated {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (System.properties.get("build.compare") != null) {
|
||||
apply plugin: 'compare-gradle-builds'
|
||||
compareGradleBuilds {
|
||||
ext.referenceProject = System.properties.get("build.compare")
|
||||
doFirst {
|
||||
if (file(referenceProject).exists() == false) {
|
||||
throw new GradleException(
|
||||
"Use git worktree to check out a version to compare against to ../elasticsearch_build_reference"
|
||||
)
|
||||
}
|
||||
}
|
||||
sourceBuild {
|
||||
gradleVersion = "4.8.1" // does not default to gradle weapper of project dir, but current version
|
||||
projectDir = referenceProject
|
||||
tasks = ["clean", "assemble"]
|
||||
arguments = ["-Dbuild.compare_friendly=true"]
|
||||
}
|
||||
targetBuild {
|
||||
tasks = ["clean", "assemble"]
|
||||
// use -Dorg.gradle.java.home= to alter jdk versions
|
||||
arguments = ["-Dbuild.compare_friendly=true"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -16,12 +16,12 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import java.nio.file.Files
|
||||
|
||||
import org.gradle.util.GradleVersion
|
||||
|
||||
apply plugin: 'groovy'
|
||||
plugins {
|
||||
id 'java-gradle-plugin'
|
||||
id 'groovy'
|
||||
}
|
||||
|
||||
group = 'org.elasticsearch.gradle'
|
||||
|
||||
|
@ -39,6 +39,12 @@ if (project == rootProject) {
|
|||
buildDir = 'build-bootstrap'
|
||||
}
|
||||
|
||||
// Make sure :buildSrc: doesn't generate classes incompatible with RUNTIME_JAVA_HOME
|
||||
// We can't use BuildPlugin here, so read from file
|
||||
String minimumRuntimeVersion = file('src/main/resources/minimumRuntimeVersion').text.trim()
|
||||
targetCompatibility = minimumRuntimeVersion
|
||||
sourceCompatibility = minimumRuntimeVersion
|
||||
|
||||
/*****************************************************************************
|
||||
* Propagating version.properties to the rest of the build *
|
||||
*****************************************************************************/
|
||||
|
@ -83,9 +89,10 @@ repositories {
|
|||
}
|
||||
|
||||
dependencies {
|
||||
compile gradleApi()
|
||||
compile localGroovy()
|
||||
compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}"
|
||||
compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
|
||||
|
||||
compile("junit:junit:${props.getProperty('junit')}") {
|
||||
transitive = false
|
||||
}
|
||||
|
@ -97,8 +104,10 @@ dependencies {
|
|||
compile 'de.thetaphi:forbiddenapis:2.5'
|
||||
compile 'org.apache.rat:apache-rat:0.11'
|
||||
compile "org.elasticsearch:jna:4.5.1"
|
||||
testCompile "junit:junit:${props.getProperty('junit')}"
|
||||
}
|
||||
|
||||
|
||||
// Gradle 2.14+ removed ProgressLogger(-Factory) classes from the public APIs
|
||||
// Use logging dependency instead
|
||||
// Gradle 4.3.1 stopped releasing the logging jars to jcenter, just use the last available one
|
||||
|
@ -106,7 +115,6 @@ GradleVersion logVersion = GradleVersion.current() > GradleVersion.version('4.3'
|
|||
|
||||
dependencies {
|
||||
compileOnly "org.gradle:gradle-logging:${logVersion.getVersion()}"
|
||||
compile 'ru.vyarus:gradle-animalsniffer-plugin:1.2.0' // Gradle 2.14 requires a version > 1.0.1
|
||||
}
|
||||
|
||||
/*****************************************************************************
|
||||
|
@ -114,14 +122,12 @@ dependencies {
|
|||
*****************************************************************************/
|
||||
// this will only happen when buildSrc is built on its own during build init
|
||||
if (project == rootProject) {
|
||||
|
||||
repositories {
|
||||
if (System.getProperty("repos.mavenLocal") != null) {
|
||||
mavenLocal()
|
||||
}
|
||||
mavenCentral()
|
||||
}
|
||||
test.exclude 'org/elasticsearch/test/NamingConventionsCheckBadClasses*'
|
||||
}
|
||||
|
||||
/*****************************************************************************
|
||||
|
@ -146,9 +152,6 @@ if (project != rootProject) {
|
|||
jarHell.enabled = false
|
||||
thirdPartyAudit.enabled = false
|
||||
|
||||
// test for elasticsearch.build tries to run with ES...
|
||||
test.enabled = false
|
||||
|
||||
// TODO: re-enable once randomizedtesting gradle code is published and removed from here
|
||||
licenseHeaders.enabled = false
|
||||
|
||||
|
@ -159,14 +162,7 @@ if (project != rootProject) {
|
|||
}
|
||||
|
||||
namingConventions {
|
||||
testClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$UnitTestCase'
|
||||
integTestClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$IntegTestCase'
|
||||
testClass = 'org.elasticsearch.gradle.test.GradleUnitTestCase'
|
||||
integTestClass = 'org.elasticsearch.gradle.test.GradleIntegrationTestCase'
|
||||
}
|
||||
|
||||
task namingConventionsMain(type: org.elasticsearch.gradle.precommit.NamingConventionsTask) {
|
||||
checkForTestsInMain = true
|
||||
testClass = namingConventions.testClass
|
||||
integTestClass = namingConventions.integTestClass
|
||||
}
|
||||
precommit.dependsOn namingConventionsMain
|
||||
}
|
||||
|
|
|
@ -1,20 +1,44 @@
|
|||
package com.carrotsearch.gradle.junit4
|
||||
|
||||
import com.carrotsearch.ant.tasks.junit4.JUnit4
|
||||
import org.gradle.api.AntBuilder
|
||||
import org.gradle.api.GradleException
|
||||
import org.gradle.api.Plugin
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.UnknownTaskException
|
||||
import org.gradle.api.plugins.JavaBasePlugin
|
||||
import org.gradle.api.tasks.TaskContainer
|
||||
import org.gradle.api.tasks.TaskProvider
|
||||
import org.gradle.api.tasks.testing.Test
|
||||
|
||||
import java.util.concurrent.atomic.AtomicBoolean
|
||||
|
||||
class RandomizedTestingPlugin implements Plugin<Project> {
|
||||
|
||||
static private AtomicBoolean sanityCheckConfigured = new AtomicBoolean(false)
|
||||
|
||||
void apply(Project project) {
|
||||
setupSeed(project)
|
||||
replaceTestTask(project.tasks)
|
||||
configureAnt(project.ant)
|
||||
configureSanityCheck(project)
|
||||
}
|
||||
|
||||
private static void configureSanityCheck(Project project) {
|
||||
// Check the task graph to confirm tasks were indeed replaced
|
||||
// https://github.com/elastic/elasticsearch/issues/31324
|
||||
if (sanityCheckConfigured.getAndSet(true) == false) {
|
||||
project.rootProject.getGradle().getTaskGraph().whenReady {
|
||||
List<Task> nonConforming = project.getGradle().getTaskGraph().allTasks
|
||||
.findAll { it.name == "test" }
|
||||
.findAll { (it instanceof RandomizedTestingTask) == false}
|
||||
.collect { "${it.path} -> ${it.class}" }
|
||||
if (nonConforming.isEmpty() == false) {
|
||||
throw new GradleException("Found the ${nonConforming.size()} `test` tasks:" +
|
||||
"\n ${nonConforming.join("\n ")}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -45,29 +69,32 @@ class RandomizedTestingPlugin implements Plugin<Project> {
|
|||
}
|
||||
|
||||
static void replaceTestTask(TaskContainer tasks) {
|
||||
Test oldTestTask = tasks.findByPath('test')
|
||||
if (oldTestTask == null) {
|
||||
// Gradle 4.8 introduced lazy tasks, thus we deal both with the `test` task as well as it's provider
|
||||
// https://github.com/gradle/gradle/issues/5730#issuecomment-398822153
|
||||
// since we can't be sure if the task was ever realized, we remove both the provider and the task
|
||||
TaskProvider<Test> oldTestProvider
|
||||
try {
|
||||
oldTestProvider = tasks.getByNameLater(Test, 'test')
|
||||
} catch (UnknownTaskException unused) {
|
||||
// no test task, ok, user will use testing task on their own
|
||||
return
|
||||
}
|
||||
tasks.remove(oldTestTask)
|
||||
Test oldTestTask = oldTestProvider.get()
|
||||
|
||||
Map properties = [
|
||||
name: 'test',
|
||||
type: RandomizedTestingTask,
|
||||
dependsOn: oldTestTask.dependsOn,
|
||||
group: JavaBasePlugin.VERIFICATION_GROUP,
|
||||
description: 'Runs unit tests with the randomized testing framework'
|
||||
]
|
||||
RandomizedTestingTask newTestTask = tasks.create(properties)
|
||||
newTestTask.classpath = oldTestTask.classpath
|
||||
newTestTask.testClassesDir = oldTestTask.project.sourceSets.test.output.classesDir
|
||||
// since gradle 4.5, tasks immutable dependencies are "hidden" (do not show up in dependsOn)
|
||||
// so we must explicitly add a dependency on generating the test classpath
|
||||
newTestTask.dependsOn('testClasses')
|
||||
// we still have to use replace here despite the remove above because the task container knows about the provider
|
||||
// by the same name
|
||||
RandomizedTestingTask newTestTask = tasks.replace('test', RandomizedTestingTask)
|
||||
newTestTask.configure{
|
||||
group = JavaBasePlugin.VERIFICATION_GROUP
|
||||
description = 'Runs unit tests with the randomized testing framework'
|
||||
dependsOn oldTestTask.dependsOn, 'testClasses'
|
||||
classpath = oldTestTask.classpath
|
||||
testClassesDirs = oldTestTask.project.sourceSets.test.output.classesDirs
|
||||
}
|
||||
|
||||
// hack so check task depends on custom test
|
||||
Task checkTask = tasks.findByPath('check')
|
||||
Task checkTask = tasks.getByName('check')
|
||||
checkTask.dependsOn.remove(oldTestProvider)
|
||||
checkTask.dependsOn.remove(oldTestTask)
|
||||
checkTask.dependsOn.add(newTestTask)
|
||||
}
|
||||
|
|
|
@ -6,18 +6,20 @@ import groovy.xml.NamespaceBuilder
|
|||
import groovy.xml.NamespaceBuilderSupport
|
||||
import org.apache.tools.ant.BuildException
|
||||
import org.apache.tools.ant.DefaultLogger
|
||||
import org.apache.tools.ant.Project
|
||||
import org.apache.tools.ant.RuntimeConfigurable
|
||||
import org.apache.tools.ant.UnknownElement
|
||||
import org.elasticsearch.gradle.BuildPlugin
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.InvalidUserDataException
|
||||
import org.gradle.api.file.FileCollection
|
||||
import org.gradle.api.file.FileTreeElement
|
||||
import org.gradle.api.internal.tasks.options.Option
|
||||
import org.gradle.api.specs.Spec
|
||||
import org.gradle.api.tasks.Input
|
||||
import org.gradle.api.tasks.InputDirectory
|
||||
import org.gradle.api.tasks.Optional
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
import org.gradle.api.tasks.options.Option
|
||||
import org.gradle.api.tasks.util.PatternFilterable
|
||||
import org.gradle.api.tasks.util.PatternSet
|
||||
import org.gradle.internal.logging.progress.ProgressLoggerFactory
|
||||
|
@ -43,8 +45,8 @@ class RandomizedTestingTask extends DefaultTask {
|
|||
@Input
|
||||
String parallelism = '1'
|
||||
|
||||
@InputDirectory
|
||||
File testClassesDir
|
||||
@Input
|
||||
FileCollection testClassesDirs
|
||||
|
||||
@Optional
|
||||
@Input
|
||||
|
@ -220,7 +222,7 @@ class RandomizedTestingTask extends DefaultTask {
|
|||
listener = new DefaultLogger(
|
||||
errorPrintStream: System.err,
|
||||
outputPrintStream: System.out,
|
||||
messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
|
||||
messageOutputLevel: Project.MSG_INFO)
|
||||
} else {
|
||||
// we want to buffer the info, and emit it if the test fails
|
||||
antLoggingBuffer = new ByteArrayOutputStream()
|
||||
|
@ -228,7 +230,7 @@ class RandomizedTestingTask extends DefaultTask {
|
|||
listener = new DefaultLogger(
|
||||
errorPrintStream: stream,
|
||||
outputPrintStream: stream,
|
||||
messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
|
||||
messageOutputLevel: Project.MSG_INFO)
|
||||
}
|
||||
project.ant.project.addBuildListener(listener)
|
||||
}
|
||||
|
@ -251,12 +253,10 @@ class RandomizedTestingTask extends DefaultTask {
|
|||
if (argLine != null) {
|
||||
jvmarg(line: argLine)
|
||||
}
|
||||
fileset(dir: testClassesDir) {
|
||||
for (String includePattern : patternSet.getIncludes()) {
|
||||
include(name: includePattern)
|
||||
}
|
||||
for (String excludePattern : patternSet.getExcludes()) {
|
||||
exclude(name: excludePattern)
|
||||
testClassesDirs.each { testClassDir ->
|
||||
fileset(dir: testClassDir) {
|
||||
patternSet.getIncludes().each { include(name: it) }
|
||||
patternSet.getExcludes().each { exclude(name: it) }
|
||||
}
|
||||
}
|
||||
for (Map.Entry<String, Object> prop : systemProperties) {
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
package org.elasticsearch.gradle
|
||||
|
||||
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
|
||||
import nebula.plugin.extraconfigurations.ProvidedBasePlugin
|
||||
import org.apache.tools.ant.taskdefs.condition.Os
|
||||
import org.eclipse.jgit.lib.Constants
|
||||
import org.eclipse.jgit.lib.RepositoryBuilder
|
||||
|
@ -58,9 +57,6 @@ import java.time.ZonedDateTime
|
|||
*/
|
||||
class BuildPlugin implements Plugin<Project> {
|
||||
|
||||
static final JavaVersion minimumRuntimeVersion = JavaVersion.VERSION_1_8
|
||||
static final JavaVersion minimumCompilerVersion = JavaVersion.VERSION_1_10
|
||||
|
||||
@Override
|
||||
void apply(Project project) {
|
||||
if (project.pluginManager.hasPlugin('elasticsearch.standalone-rest-test')) {
|
||||
|
@ -95,6 +91,12 @@ class BuildPlugin implements Plugin<Project> {
|
|||
/** Performs checks on the build environment and prints information about the build environment. */
|
||||
static void globalBuildInfo(Project project) {
|
||||
if (project.rootProject.ext.has('buildChecksDone') == false) {
|
||||
JavaVersion minimumRuntimeVersion = JavaVersion.toVersion(
|
||||
BuildPlugin.class.getClassLoader().getResourceAsStream("minimumRuntimeVersion").text.trim()
|
||||
)
|
||||
JavaVersion minimumCompilerVersion = JavaVersion.toVersion(
|
||||
BuildPlugin.class.getClassLoader().getResourceAsStream("minimumCompilerVersion").text.trim()
|
||||
)
|
||||
String compilerJavaHome = findCompilerJavaHome()
|
||||
String runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome)
|
||||
File gradleJavaHome = Jvm.current().javaHome
|
||||
|
@ -192,10 +194,12 @@ class BuildPlugin implements Plugin<Project> {
|
|||
project.rootProject.ext.runtimeJavaVersion = runtimeJavaVersionEnum
|
||||
project.rootProject.ext.javaVersions = javaVersions
|
||||
project.rootProject.ext.buildChecksDone = true
|
||||
project.rootProject.ext.minimumCompilerVersion = minimumCompilerVersion
|
||||
project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion
|
||||
}
|
||||
|
||||
project.targetCompatibility = minimumRuntimeVersion
|
||||
project.sourceCompatibility = minimumRuntimeVersion
|
||||
project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion
|
||||
project.sourceCompatibility = project.rootProject.ext.minimumRuntimeVersion
|
||||
|
||||
// set java home for each project, so they dont have to find it in the root project
|
||||
project.ext.compilerJavaHome = project.rootProject.ext.compilerJavaHome
|
||||
|
@ -348,7 +352,9 @@ class BuildPlugin implements Plugin<Project> {
|
|||
// just a self contained test-fixture configuration, likely transitive and hellacious
|
||||
return
|
||||
}
|
||||
configuration.resolutionStrategy.failOnVersionConflict()
|
||||
configuration.resolutionStrategy {
|
||||
failOnVersionConflict()
|
||||
}
|
||||
})
|
||||
|
||||
// force all dependencies added directly to compile/testCompile to be non-transitive, except for ES itself
|
||||
|
@ -465,6 +471,24 @@ class BuildPlugin implements Plugin<Project> {
|
|||
|
||||
/**Configuration generation of maven poms. */
|
||||
public static void configurePomGeneration(Project project) {
|
||||
// Only works with `enableFeaturePreview('STABLE_PUBLISHING')`
|
||||
// https://github.com/gradle/gradle/issues/5696#issuecomment-396965185
|
||||
project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
|
||||
// The GenerateMavenPom task is aggressive about setting the destination, instead of fighting it,
|
||||
// just make a copy.
|
||||
doLast {
|
||||
project.copy {
|
||||
from generatePOMTask.destination
|
||||
into "${project.buildDir}/distributions"
|
||||
rename { "${project.archivesBaseName}-${project.version}.pom" }
|
||||
}
|
||||
}
|
||||
// build poms with assemble (if the assemble task exists)
|
||||
Task assemble = project.tasks.findByName('assemble')
|
||||
if (assemble) {
|
||||
assemble.dependsOn(generatePOMTask)
|
||||
}
|
||||
}
|
||||
project.plugins.withType(MavenPublishPlugin.class).whenPluginAdded {
|
||||
project.publishing {
|
||||
publications {
|
||||
|
@ -474,16 +498,6 @@ class BuildPlugin implements Plugin<Project> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom t ->
|
||||
// place the pom next to the jar it is for
|
||||
t.destination = new File(project.buildDir, "distributions/${project.archivesBaseName}-${project.version}.pom")
|
||||
// build poms with assemble (if the assemble task exists)
|
||||
Task assemble = project.tasks.findByName('assemble')
|
||||
if (assemble) {
|
||||
assemble.dependsOn(t)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -625,6 +639,10 @@ class BuildPlugin implements Plugin<Project> {
|
|||
jarTask.manifest.attributes('Change': shortHash)
|
||||
}
|
||||
}
|
||||
// Force manifest entries that change by nature to a constant to be able to compare builds more effectively
|
||||
if (System.properties.getProperty("build.compare_friendly", "false") == "true") {
|
||||
jarTask.manifest.getAttributes().clear()
|
||||
}
|
||||
}
|
||||
// add license/notice files
|
||||
project.afterEvaluate {
|
||||
|
@ -673,6 +691,7 @@ class BuildPlugin implements Plugin<Project> {
|
|||
systemProperty 'tests.task', path
|
||||
systemProperty 'tests.security.manager', 'true'
|
||||
systemProperty 'jna.nosys', 'true'
|
||||
systemProperty 'es.scripting.exception_for_missing_value', 'true'
|
||||
// TODO: remove setting logging level via system property
|
||||
systemProperty 'tests.logger.level', 'WARN'
|
||||
for (Map.Entry<String, String> property : System.properties.entrySet()) {
|
||||
|
@ -741,7 +760,7 @@ class BuildPlugin implements Plugin<Project> {
|
|||
project.extensions.add('additionalTest', { String name, Closure config ->
|
||||
RandomizedTestingTask additionalTest = project.tasks.create(name, RandomizedTestingTask.class)
|
||||
additionalTest.classpath = test.classpath
|
||||
additionalTest.testClassesDir = test.testClassesDir
|
||||
additionalTest.testClassesDirs = test.testClassesDirs
|
||||
additionalTest.configure(commonTestConfig(project))
|
||||
additionalTest.configure(config)
|
||||
additionalTest.dependsOn(project.tasks.testClasses)
|
||||
|
|
|
@ -1,45 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.gradle
|
||||
|
||||
import org.gradle.api.GradleException
|
||||
import org.gradle.api.tasks.Exec
|
||||
|
||||
/**
|
||||
* A wrapper around gradle's Exec task to capture output and log on error.
|
||||
*/
|
||||
class LoggedExec extends Exec {
|
||||
|
||||
protected ByteArrayOutputStream output = new ByteArrayOutputStream()
|
||||
|
||||
LoggedExec() {
|
||||
if (logger.isInfoEnabled() == false) {
|
||||
standardOutput = output
|
||||
errorOutput = output
|
||||
ignoreExitValue = true
|
||||
doLast {
|
||||
if (execResult.exitValue != 0) {
|
||||
output.toString('UTF-8').eachLine { line -> logger.error(line) }
|
||||
throw new GradleException("Process '${executable} ${args.join(' ')}' finished with non-zero exit value ${execResult.exitValue}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,41 @@
|
|||
package org.elasticsearch.gradle;
|
||||
|
||||
import groovy.lang.Closure;
|
||||
import org.gradle.api.GradleException;
|
||||
import org.gradle.api.Task;
|
||||
import org.gradle.api.tasks.Exec;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* A wrapper around gradle's Exec task to capture output and log on error.
|
||||
*/
|
||||
public class LoggedExec extends Exec {
|
||||
|
||||
protected ByteArrayOutputStream output = new ByteArrayOutputStream();
|
||||
|
||||
public LoggedExec() {
|
||||
if (getLogger().isInfoEnabled() == false) {
|
||||
setStandardOutput(output);
|
||||
setErrorOutput(output);
|
||||
setIgnoreExitValue(true);
|
||||
doLast(new Closure<Void>(this, this) {
|
||||
public void doCall(Task it) throws IOException {
|
||||
if (getExecResult().getExitValue() != 0) {
|
||||
for (String line : output.toString("UTF-8").split("\\R")) {
|
||||
getLogger().error(line);
|
||||
}
|
||||
throw new GradleException(
|
||||
"Process \'" + getExecutable() + " " +
|
||||
getArgs().stream().collect(Collectors.joining(" "))+
|
||||
"\' finished with non-zero exit value " +
|
||||
String.valueOf(getExecResult().getExitValue())
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,41 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle
|
||||
|
||||
/**
|
||||
* Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions.
|
||||
*/
|
||||
class VersionProperties {
|
||||
static final Version elasticsearch
|
||||
static final String lucene
|
||||
static final Map<String, String> versions = new HashMap<>()
|
||||
static {
|
||||
Properties props = new Properties()
|
||||
InputStream propsStream = VersionProperties.class.getResourceAsStream('/version.properties')
|
||||
if (propsStream == null) {
|
||||
throw new RuntimeException('/version.properties resource missing')
|
||||
}
|
||||
props.load(propsStream)
|
||||
elasticsearch = Version.fromString(props.getProperty('elasticsearch'))
|
||||
lucene = props.getProperty('lucene')
|
||||
for (String property : props.stringPropertyNames()) {
|
||||
versions.put(property, props.getProperty(property))
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,50 @@
|
|||
package org.elasticsearch.gradle;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions.
|
||||
*/
|
||||
public class VersionProperties {
|
||||
public static Version getElasticsearch() {
|
||||
return elasticsearch;
|
||||
}
|
||||
|
||||
public static String getLucene() {
|
||||
return lucene;
|
||||
}
|
||||
|
||||
public static Map<String, String> getVersions() {
|
||||
return versions;
|
||||
}
|
||||
|
||||
private static final Version elasticsearch;
|
||||
private static final String lucene;
|
||||
private static final Map<String, String> versions = new HashMap<String, String>();
|
||||
static {
|
||||
Properties props = getVersionProperties();
|
||||
elasticsearch = Version.fromString(props.getProperty("elasticsearch"));
|
||||
lucene = props.getProperty("lucene");
|
||||
for (String property : props.stringPropertyNames()) {
|
||||
versions.put(property, props.getProperty(property));
|
||||
}
|
||||
}
|
||||
|
||||
private static Properties getVersionProperties() {
|
||||
Properties props = new Properties();
|
||||
InputStream propsStream = VersionProperties.class.getResourceAsStream("/version.properties");
|
||||
if (propsStream == null) {
|
||||
throw new RuntimeException("/version.properties resource missing");
|
||||
}
|
||||
try {
|
||||
props.load(propsStream);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return props;
|
||||
}
|
||||
}
|
|
@ -237,6 +237,18 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
|
|||
current.println(" - stash_in_path")
|
||||
current.println(" - stash_path_replace")
|
||||
current.println(" - warnings")
|
||||
if (test.testEnv != null) {
|
||||
switch (test.testEnv) {
|
||||
case 'basic':
|
||||
case 'gold':
|
||||
case 'platinum':
|
||||
current.println(" - xpack")
|
||||
break;
|
||||
default:
|
||||
throw new InvalidUserDataException('Unsupported testEnv: '
|
||||
+ test.testEnv)
|
||||
}
|
||||
}
|
||||
}
|
||||
if (test.skipTest) {
|
||||
if (test.continued) {
|
||||
|
|
|
@ -84,6 +84,7 @@ public class SnippetsTask extends DefaultTask {
|
|||
Snippet snippet = null
|
||||
StringBuilder contents = null
|
||||
List substitutions = null
|
||||
String testEnv = null
|
||||
Closure emit = {
|
||||
snippet.contents = contents.toString()
|
||||
contents = null
|
||||
|
@ -143,10 +144,14 @@ public class SnippetsTask extends DefaultTask {
|
|||
}
|
||||
file.eachLine('UTF-8') { String line, int lineNumber ->
|
||||
Matcher matcher
|
||||
matcher = line =~ /\[testenv="([^"]+)"\]\s*/
|
||||
if (matcher.matches()) {
|
||||
testEnv = matcher.group(1)
|
||||
}
|
||||
if (line ==~ /-{4,}\s*/) { // Four dashes looks like a snippet
|
||||
if (snippet == null) {
|
||||
Path path = docs.dir.toPath().relativize(file.toPath())
|
||||
snippet = new Snippet(path: path, start: lineNumber)
|
||||
snippet = new Snippet(path: path, start: lineNumber, testEnv: testEnv)
|
||||
if (lastLanguageLine == lineNumber - 1) {
|
||||
snippet.language = lastLanguage
|
||||
}
|
||||
|
@ -297,6 +302,7 @@ public class SnippetsTask extends DefaultTask {
|
|||
int start
|
||||
int end = NOT_FINISHED
|
||||
String contents
|
||||
String testEnv
|
||||
|
||||
Boolean console = null
|
||||
boolean test = false
|
||||
|
@ -321,6 +327,9 @@ public class SnippetsTask extends DefaultTask {
|
|||
}
|
||||
if (test) {
|
||||
result += '// TEST'
|
||||
if (testEnv != null) {
|
||||
result += "[testenv=$testEnv]"
|
||||
}
|
||||
if (catchPart) {
|
||||
result += "[catch: $catchPart]"
|
||||
}
|
||||
|
|
|
@ -71,7 +71,9 @@ public class PluginBuildPlugin extends BuildPlugin {
|
|||
if (isModule) {
|
||||
project.integTestCluster.module(project)
|
||||
project.tasks.run.clusterConfig.module(project)
|
||||
project.tasks.run.clusterConfig.distribution = 'integ-test-zip'
|
||||
project.tasks.run.clusterConfig.distribution = System.getProperty(
|
||||
'run.distribution', 'integ-test-zip'
|
||||
)
|
||||
} else {
|
||||
project.integTestCluster.plugin(project.path)
|
||||
project.tasks.run.clusterConfig.plugin(project.path)
|
||||
|
@ -111,7 +113,7 @@ public class PluginBuildPlugin extends BuildPlugin {
|
|||
private static void createIntegTestTask(Project project) {
|
||||
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
|
||||
integTest.mustRunAfter(project.precommit, project.test)
|
||||
project.integTestCluster.distribution = 'integ-test-zip'
|
||||
project.integTestCluster.distribution = System.getProperty('tests.distribution', 'integ-test-zip')
|
||||
project.check.dependsOn(integTest)
|
||||
}
|
||||
|
||||
|
@ -157,16 +159,18 @@ public class PluginBuildPlugin extends BuildPlugin {
|
|||
/** Adds a task to move jar and associated files to a "-client" name. */
|
||||
protected static void addClientJarTask(Project project) {
|
||||
Task clientJar = project.tasks.create('clientJar')
|
||||
clientJar.dependsOn(project.jar, 'generatePomFileForClientJarPublication', project.javadocJar, project.sourcesJar)
|
||||
clientJar.dependsOn(project.jar, project.tasks.generatePomFileForClientJarPublication, project.javadocJar, project.sourcesJar)
|
||||
clientJar.doFirst {
|
||||
Path jarFile = project.jar.outputs.files.singleFile.toPath()
|
||||
String clientFileName = jarFile.fileName.toString().replace(project.version, "client-${project.version}")
|
||||
Files.copy(jarFile, jarFile.resolveSibling(clientFileName), StandardCopyOption.REPLACE_EXISTING)
|
||||
|
||||
String pomFileName = jarFile.fileName.toString().replace('.jar', '.pom')
|
||||
String clientPomFileName = clientFileName.replace('.jar', '.pom')
|
||||
Files.copy(jarFile.resolveSibling(pomFileName), jarFile.resolveSibling(clientPomFileName),
|
||||
StandardCopyOption.REPLACE_EXISTING)
|
||||
Files.copy(
|
||||
project.tasks.generatePomFileForClientJarPublication.outputs.files.singleFile.toPath(),
|
||||
jarFile.resolveSibling(clientPomFileName),
|
||||
StandardCopyOption.REPLACE_EXISTING
|
||||
)
|
||||
|
||||
String sourcesFileName = jarFile.fileName.toString().replace('.jar', '-sources.jar')
|
||||
String clientSourcesFileName = clientFileName.replace('.jar', '-sources.jar')
|
||||
|
|
|
@ -50,11 +50,11 @@ public class LoggerUsageTask extends LoggedExec {
|
|||
List files = []
|
||||
// But only if the source sets that will make them exist
|
||||
if (project.sourceSets.findByName("main")) {
|
||||
files.add(project.sourceSets.main.output.classesDir)
|
||||
files.addAll(project.sourceSets.main.output.classesDirs.getFiles())
|
||||
dependsOn project.tasks.classes
|
||||
}
|
||||
if (project.sourceSets.findByName("test")) {
|
||||
files.add(project.sourceSets.test.output.classesDir)
|
||||
files.addAll(project.sourceSets.test.output.classesDirs.getFiles())
|
||||
dependsOn project.tasks.testClasses
|
||||
}
|
||||
/* In an extra twist, it isn't good enough that the source set
|
||||
|
|
|
@ -1,126 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.gradle.precommit
|
||||
|
||||
import org.elasticsearch.gradle.LoggedExec
|
||||
import org.elasticsearch.gradle.VersionProperties
|
||||
import org.gradle.api.artifacts.Dependency
|
||||
import org.gradle.api.file.FileCollection
|
||||
import org.gradle.api.tasks.Input
|
||||
import org.gradle.api.tasks.InputFiles
|
||||
import org.gradle.api.tasks.OutputFile
|
||||
/**
|
||||
* Runs NamingConventionsCheck on a classpath/directory combo to verify that
|
||||
* tests are named according to our conventions so they'll be picked up by
|
||||
* gradle. Read the Javadoc for NamingConventionsCheck to learn more.
|
||||
*/
|
||||
public class NamingConventionsTask extends LoggedExec {
|
||||
/**
|
||||
* We use a simple "marker" file that we touch when the task succeeds
|
||||
* as the task output. This is compared against the modified time of the
|
||||
* inputs (ie the jars/class files).
|
||||
*/
|
||||
@OutputFile
|
||||
File successMarker = new File(project.buildDir, "markers/${this.name}")
|
||||
|
||||
/**
|
||||
* Should we skip the integ tests in disguise tests? Defaults to true because only core names its
|
||||
* integ tests correctly.
|
||||
*/
|
||||
@Input
|
||||
boolean skipIntegTestInDisguise = false
|
||||
|
||||
/**
|
||||
* Superclass for all tests.
|
||||
*/
|
||||
@Input
|
||||
String testClass = 'org.apache.lucene.util.LuceneTestCase'
|
||||
|
||||
/**
|
||||
* Superclass for all integration tests.
|
||||
*/
|
||||
@Input
|
||||
String integTestClass = 'org.elasticsearch.test.ESIntegTestCase'
|
||||
|
||||
/**
|
||||
* Should the test also check the main classpath for test classes instead of
|
||||
* doing the usual checks to the test classpath.
|
||||
*/
|
||||
@Input
|
||||
boolean checkForTestsInMain = false;
|
||||
|
||||
public NamingConventionsTask() {
|
||||
// Extra classpath contains the actual test
|
||||
if (false == project.configurations.names.contains('namingConventions')) {
|
||||
project.configurations.create('namingConventions')
|
||||
Dependency buildToolsDep = project.dependencies.add('namingConventions',
|
||||
"org.elasticsearch.gradle:build-tools:${VersionProperties.elasticsearch}")
|
||||
buildToolsDep.transitive = false // We don't need gradle in the classpath. It conflicts.
|
||||
}
|
||||
FileCollection classpath = project.files(project.configurations.namingConventions,
|
||||
project.sourceSets.test.compileClasspath,
|
||||
project.sourceSets.test.output)
|
||||
dependsOn(classpath)
|
||||
inputs.files(classpath)
|
||||
description = "Tests that test classes aren't misnamed or misplaced"
|
||||
executable = new File(project.runtimeJavaHome, 'bin/java')
|
||||
if (false == checkForTestsInMain) {
|
||||
/* This task is created by default for all subprojects with this
|
||||
* setting and there is no point in running it if the files don't
|
||||
* exist. */
|
||||
onlyIf { project.sourceSets.test.output.classesDir.exists() }
|
||||
}
|
||||
|
||||
/*
|
||||
* We build the arguments in a funny afterEvaluate/doFirst closure so that we can wait for the classpath to be
|
||||
* ready for us. Strangely neither one on their own are good enough.
|
||||
*/
|
||||
project.afterEvaluate {
|
||||
doFirst {
|
||||
args('-Djna.nosys=true')
|
||||
args('-cp', classpath.asPath, 'org.elasticsearch.test.NamingConventionsCheck')
|
||||
args('--test-class', testClass)
|
||||
if (skipIntegTestInDisguise) {
|
||||
args('--skip-integ-tests-in-disguise')
|
||||
} else {
|
||||
args('--integ-test-class', integTestClass)
|
||||
}
|
||||
/*
|
||||
* The test framework has classes that fail the checks to validate that the checks fail properly.
|
||||
* Since these would cause the build to fail we have to ignore them with this parameter. The
|
||||
* process of ignoring them lets us validate that they were found so this ignore parameter acts
|
||||
* as the test for the NamingConventionsCheck.
|
||||
*/
|
||||
if (':build-tools'.equals(project.path)) {
|
||||
args('--self-test')
|
||||
}
|
||||
if (checkForTestsInMain) {
|
||||
args('--main')
|
||||
args('--')
|
||||
args(project.sourceSets.main.output.classesDir.absolutePath)
|
||||
} else {
|
||||
args('--')
|
||||
args(project.sourceSets.test.output.classesDir.absolutePath)
|
||||
}
|
||||
}
|
||||
}
|
||||
doLast { successMarker.setText("", 'UTF-8') }
|
||||
}
|
||||
}
|
|
@ -0,0 +1,185 @@
|
|||
package org.elasticsearch.gradle.precommit;
|
||||
|
||||
import groovy.lang.Closure;
|
||||
import org.codehaus.groovy.runtime.ResourceGroovyMethods;
|
||||
import org.elasticsearch.gradle.LoggedExec;
|
||||
import org.elasticsearch.test.NamingConventionsCheck;
|
||||
import org.gradle.api.GradleException;
|
||||
import org.gradle.api.Project;
|
||||
import org.gradle.api.Task;
|
||||
import org.gradle.api.file.FileCollection;
|
||||
import org.gradle.api.plugins.ExtraPropertiesExtension;
|
||||
import org.gradle.api.plugins.JavaPluginConvention;
|
||||
import org.gradle.api.tasks.AbstractExecTask;
|
||||
import org.gradle.api.tasks.Input;
|
||||
import org.gradle.api.tasks.OutputFile;
|
||||
import org.gradle.api.tasks.SourceSetContainer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Runs NamingConventionsCheck on a classpath/directory combo to verify that
|
||||
* tests are named according to our conventions so they'll be picked up by
|
||||
* gradle. Read the Javadoc for NamingConventionsCheck to learn more.
|
||||
*/
|
||||
public class NamingConventionsTask extends LoggedExec {
|
||||
public NamingConventionsTask() {
|
||||
setDescription("Tests that test classes aren't misnamed or misplaced");
|
||||
final Project project = getProject();
|
||||
|
||||
SourceSetContainer sourceSets = getJavaSourceSets();
|
||||
final FileCollection classpath = project.files(
|
||||
// This works because the class only depends on one class from junit that will be available from the
|
||||
// tests compile classpath. It's the most straight forward way of telling Java where to find the main
|
||||
// class.
|
||||
NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation().getPath(),
|
||||
// the tests to be loaded
|
||||
checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(),
|
||||
sourceSets.getByName("test").getCompileClasspath(),
|
||||
sourceSets.getByName("test").getOutput()
|
||||
);
|
||||
dependsOn(project.getTasks().matching(it -> "testCompileClasspath".equals(it.getName())));
|
||||
getInputs().files(classpath);
|
||||
|
||||
setExecutable(new File(
|
||||
Objects.requireNonNull(
|
||||
project.getExtensions().getByType(ExtraPropertiesExtension.class).get("runtimeJavaHome")
|
||||
).toString(),
|
||||
"bin/java")
|
||||
);
|
||||
|
||||
if (checkForTestsInMain == false) {
|
||||
/* This task is created by default for all subprojects with this
|
||||
* setting and there is no point in running it if the files don't
|
||||
* exist. */
|
||||
onlyIf((unused) -> getExistingClassesDirs().isEmpty() == false);
|
||||
}
|
||||
|
||||
/*
|
||||
* We build the arguments in a funny afterEvaluate/doFirst closure so that we can wait for the classpath to be
|
||||
* ready for us. Strangely neither one on their own are good enough.
|
||||
*/
|
||||
project.afterEvaluate(new Closure<Task>(this, this) {
|
||||
public Task doCall(Project it) {
|
||||
return doFirst(new Closure<AbstractExecTask>(NamingConventionsTask.this, NamingConventionsTask.this) {
|
||||
public AbstractExecTask doCall(Task it) {
|
||||
args("-Djna.nosys=true");
|
||||
args("-cp", classpath.getAsPath(), "org.elasticsearch.test.NamingConventionsCheck");
|
||||
args("--test-class", getTestClass());
|
||||
if (skipIntegTestInDisguise) {
|
||||
args("--skip-integ-tests-in-disguise");
|
||||
} else {
|
||||
args("--integ-test-class", getIntegTestClass());
|
||||
}
|
||||
if (getCheckForTestsInMain()) {
|
||||
args("--main");
|
||||
args("--");
|
||||
} else {
|
||||
args("--");
|
||||
}
|
||||
return args(getExistingClassesDirs().getAsPath());
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
doLast(new Closure<Object>(this, this) {
|
||||
public void doCall(Task it) {
|
||||
try {
|
||||
ResourceGroovyMethods.setText(getSuccessMarker(), "", "UTF-8");
|
||||
} catch (IOException e) {
|
||||
throw new GradleException("io exception", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private SourceSetContainer getJavaSourceSets() {
|
||||
return getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets();
|
||||
}
|
||||
|
||||
public FileCollection getExistingClassesDirs() {
|
||||
FileCollection classesDirs = getJavaSourceSets().getByName(checkForTestsInMain ? "main" : "test")
|
||||
.getOutput().getClassesDirs();
|
||||
return classesDirs.filter(it -> it.exists());
|
||||
}
|
||||
|
||||
public File getSuccessMarker() {
|
||||
return successMarker;
|
||||
}
|
||||
|
||||
public void setSuccessMarker(File successMarker) {
|
||||
this.successMarker = successMarker;
|
||||
}
|
||||
|
||||
public boolean getSkipIntegTestInDisguise() {
|
||||
return skipIntegTestInDisguise;
|
||||
}
|
||||
|
||||
public boolean isSkipIntegTestInDisguise() {
|
||||
return skipIntegTestInDisguise;
|
||||
}
|
||||
|
||||
public void setSkipIntegTestInDisguise(boolean skipIntegTestInDisguise) {
|
||||
this.skipIntegTestInDisguise = skipIntegTestInDisguise;
|
||||
}
|
||||
|
||||
public String getTestClass() {
|
||||
return testClass;
|
||||
}
|
||||
|
||||
public void setTestClass(String testClass) {
|
||||
this.testClass = testClass;
|
||||
}
|
||||
|
||||
public String getIntegTestClass() {
|
||||
return integTestClass;
|
||||
}
|
||||
|
||||
public void setIntegTestClass(String integTestClass) {
|
||||
this.integTestClass = integTestClass;
|
||||
}
|
||||
|
||||
public boolean getCheckForTestsInMain() {
|
||||
return checkForTestsInMain;
|
||||
}
|
||||
|
||||
public boolean isCheckForTestsInMain() {
|
||||
return checkForTestsInMain;
|
||||
}
|
||||
|
||||
public void setCheckForTestsInMain(boolean checkForTestsInMain) {
|
||||
this.checkForTestsInMain = checkForTestsInMain;
|
||||
}
|
||||
|
||||
/**
|
||||
* We use a simple "marker" file that we touch when the task succeeds
|
||||
* as the task output. This is compared against the modified time of the
|
||||
* inputs (ie the jars/class files).
|
||||
*/
|
||||
@OutputFile
|
||||
private File successMarker = new File(getProject().getBuildDir(), "markers/" + this.getName());
|
||||
/**
|
||||
* Should we skip the integ tests in disguise tests? Defaults to true because only core names its
|
||||
* integ tests correctly.
|
||||
*/
|
||||
@Input
|
||||
private boolean skipIntegTestInDisguise = false;
|
||||
/**
|
||||
* Superclass for all tests.
|
||||
*/
|
||||
@Input
|
||||
private String testClass = "org.apache.lucene.util.LuceneTestCase";
|
||||
/**
|
||||
* Superclass for all integration tests.
|
||||
*/
|
||||
@Input
|
||||
private String integTestClass = "org.elasticsearch.test.ESIntegTestCase";
|
||||
/**
|
||||
* Should the test also check the main classpath for test classes instead of
|
||||
* doing the usual checks to the test classpath.
|
||||
*/
|
||||
@Input
|
||||
private boolean checkForTestsInMain = false;
|
||||
}
|
|
@ -88,6 +88,9 @@ class ClusterFormationTasks {
|
|||
Configuration currentDistro = project.configurations.create("${prefix}_elasticsearchDistro")
|
||||
Configuration bwcDistro = project.configurations.create("${prefix}_elasticsearchBwcDistro")
|
||||
Configuration bwcPlugins = project.configurations.create("${prefix}_elasticsearchBwcPlugins")
|
||||
if (System.getProperty('tests.distribution', 'oss-zip') == 'integ-test-zip') {
|
||||
throw new Exception("tests.distribution=integ-test-zip is not supported")
|
||||
}
|
||||
configureDistributionDependency(project, config.distribution, currentDistro, VersionProperties.elasticsearch)
|
||||
if (config.numBwcNodes > 0) {
|
||||
if (config.bwcVersion == null) {
|
||||
|
@ -533,7 +536,8 @@ class ClusterFormationTasks {
|
|||
|
||||
static Task configureInstallModuleTask(String name, Project project, Task setup, NodeInfo node, Project module) {
|
||||
if (node.config.distribution != 'integ-test-zip') {
|
||||
throw new GradleException("Module ${module.path} not allowed be installed distributions other than integ-test-zip because they should already have all modules bundled!")
|
||||
project.logger.info("Not installing modules for $name, ${node.config.distribution} already has them")
|
||||
return setup
|
||||
}
|
||||
if (module.plugins.hasPlugin(PluginBuildPlugin) == false) {
|
||||
throw new GradleException("Task ${name} cannot include module ${module.path} which is not an esplugin")
|
||||
|
@ -643,6 +647,9 @@ class ClusterFormationTasks {
|
|||
BuildPlugin.requireJavaHome(start, node.javaVersion)
|
||||
}
|
||||
start.doLast(elasticsearchRunner)
|
||||
start.doFirst {
|
||||
project.logger.info("Starting node in ${node.clusterName} distribution: ${node.config.distribution}")
|
||||
}
|
||||
return start
|
||||
}
|
||||
|
||||
|
|
|
@ -61,7 +61,7 @@ public class RestIntegTestTask extends DefaultTask {
|
|||
clusterInit = project.tasks.create(name: "${name}Cluster#init", dependsOn: project.testClasses)
|
||||
runner.dependsOn(clusterInit)
|
||||
runner.classpath = project.sourceSets.test.runtimeClasspath
|
||||
runner.testClassesDir = project.sourceSets.test.output.classesDir
|
||||
runner.testClassesDirs = project.sourceSets.test.output.classesDirs
|
||||
clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project)
|
||||
|
||||
// start with the common test configuration
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.gradle.api.Plugin
|
|||
import org.gradle.api.Project
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.plugins.JavaBasePlugin
|
||||
import org.gradle.api.tasks.compile.JavaCompile
|
||||
|
||||
/**
|
||||
* Configures the build to compile tests against Elasticsearch's test framework
|
||||
|
@ -61,5 +62,12 @@ public class StandaloneRestTestPlugin implements Plugin<Project> {
|
|||
|
||||
PrecommitTasks.create(project, false)
|
||||
project.check.dependsOn(project.precommit)
|
||||
|
||||
project.tasks.withType(JavaCompile) {
|
||||
// This will be the default in Gradle 5.0
|
||||
if (options.compilerArgs.contains("-processor") == false) {
|
||||
options.compilerArgs << '-proc:none'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -47,15 +47,8 @@ public class StandaloneTestPlugin implements Plugin<Project> {
|
|||
test.configure(BuildPlugin.commonTestConfig(project))
|
||||
BuildPlugin.configureCompile(project)
|
||||
test.classpath = project.sourceSets.test.runtimeClasspath
|
||||
test.testClassesDir project.sourceSets.test.output.classesDir
|
||||
test.testClassesDirs = project.sourceSets.test.output.classesDirs
|
||||
test.mustRunAfter(project.precommit)
|
||||
project.check.dependsOn(test)
|
||||
|
||||
project.tasks.withType(JavaCompile) {
|
||||
// This will be the default in Gradle 5.0
|
||||
if (options.compilerArgs.contains("-processor") == false) {
|
||||
options.compilerArgs << '-proc:none'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,14 +22,9 @@ import org.apache.commons.io.output.TeeOutputStream
|
|||
import org.elasticsearch.gradle.LoggedExec
|
||||
import org.gradle.api.tasks.Input
|
||||
import org.gradle.api.tasks.Optional
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
import org.gradle.internal.logging.progress.ProgressLoggerFactory
|
||||
|
||||
import javax.inject.Inject
|
||||
import java.util.concurrent.CountDownLatch
|
||||
import java.util.concurrent.locks.Lock
|
||||
import java.util.concurrent.locks.ReadWriteLock
|
||||
import java.util.concurrent.locks.ReentrantLock
|
||||
|
||||
/**
|
||||
* Runs a vagrant command. Pretty much like Exec task but with a nicer output
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.test;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.nio.file.FileVisitResult;
|
||||
|
@ -30,6 +31,7 @@ import java.nio.file.attribute.BasicFileAttributes;
|
|||
import java.util.HashSet;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* Checks that all tests in a directory are named according to our naming conventions. This is important because tests that do not follow
|
||||
|
@ -37,19 +39,13 @@ import java.util.Set;
|
|||
* a class with a main method so gradle can call it for each project. This has the advantage of allowing gradle to calculate when it is
|
||||
* {@code UP-TO-DATE} so it can be skipped if the compiled classes haven't changed. This is useful on large modules for which checking all
|
||||
* the modules can be slow.
|
||||
*
|
||||
* Annoyingly, this cannot be tested using standard unit tests because to do so you'd have to declare classes that violate the rules. That
|
||||
* would cause the test fail which would prevent the build from passing. So we have to make a mechanism for removing those test classes. Now
|
||||
* that we have such a mechanism it isn't much work to fail the process if we don't detect the offending classes. Thus, the funky
|
||||
* {@code --self-test} that is only run in the test:framework project.
|
||||
*/
|
||||
public class NamingConventionsCheck {
|
||||
public static void main(String[] args) throws IOException {
|
||||
Class<?> testClass = null;
|
||||
Class<?> integTestClass = null;
|
||||
Path rootPath = null;
|
||||
String rootPathList = null;
|
||||
boolean skipIntegTestsInDisguise = false;
|
||||
boolean selfTest = false;
|
||||
boolean checkMainClasses = false;
|
||||
for (int i = 0; i < args.length; i++) {
|
||||
String arg = args[i];
|
||||
|
@ -63,14 +59,11 @@ public class NamingConventionsCheck {
|
|||
case "--skip-integ-tests-in-disguise":
|
||||
skipIntegTestsInDisguise = true;
|
||||
break;
|
||||
case "--self-test":
|
||||
selfTest = true;
|
||||
break;
|
||||
case "--main":
|
||||
checkMainClasses = true;
|
||||
break;
|
||||
case "--":
|
||||
rootPath = Paths.get(args[++i]);
|
||||
rootPathList = args[++i];
|
||||
break;
|
||||
default:
|
||||
fail("unsupported argument '" + arg + "'");
|
||||
|
@ -78,44 +71,49 @@ public class NamingConventionsCheck {
|
|||
}
|
||||
|
||||
NamingConventionsCheck check = new NamingConventionsCheck(testClass, integTestClass);
|
||||
if (checkMainClasses) {
|
||||
check.checkMain(rootPath);
|
||||
} else {
|
||||
check.checkTests(rootPath, skipIntegTestsInDisguise);
|
||||
}
|
||||
|
||||
if (selfTest) {
|
||||
for (String rootDir : rootPathList.split(Pattern.quote(File.pathSeparator))) {
|
||||
Path rootPath = Paths.get(rootDir);
|
||||
if (checkMainClasses) {
|
||||
assertViolation(NamingConventionsCheckInMainTests.class.getName(), check.testsInMain);
|
||||
assertViolation(NamingConventionsCheckInMainIT.class.getName(), check.testsInMain);
|
||||
check.checkMain(rootPath);
|
||||
} else {
|
||||
assertViolation("WrongName", check.missingSuffix);
|
||||
assertViolation("WrongNameTheSecond", check.missingSuffix);
|
||||
assertViolation("DummyAbstractTests", check.notRunnable);
|
||||
assertViolation("DummyInterfaceTests", check.notRunnable);
|
||||
assertViolation("InnerTests", check.innerClasses);
|
||||
assertViolation("NotImplementingTests", check.notImplementing);
|
||||
assertViolation("PlainUnit", check.pureUnitTest);
|
||||
check.checkTests(rootPath, skipIntegTestsInDisguise);
|
||||
}
|
||||
}
|
||||
|
||||
// Now we should have no violations
|
||||
assertNoViolations(
|
||||
int exitCode = 0 ;
|
||||
exitCode += countAndPrintViolations(
|
||||
"Not all subclasses of " + check.testClass.getSimpleName()
|
||||
+ " match the naming convention. Concrete classes must end with [Tests]",
|
||||
check.missingSuffix);
|
||||
assertNoViolations("Classes ending with [Tests] are abstract or interfaces", check.notRunnable);
|
||||
assertNoViolations("Found inner classes that are tests, which are excluded from the test runner", check.innerClasses);
|
||||
assertNoViolations("Pure Unit-Test found must subclass [" + check.testClass.getSimpleName() + "]", check.pureUnitTest);
|
||||
assertNoViolations("Classes ending with [Tests] must subclass [" + check.testClass.getSimpleName() + "]", check.notImplementing);
|
||||
assertNoViolations(
|
||||
"Classes ending with [Tests] or [IT] or extending [" + check.testClass.getSimpleName() + "] must be in src/test/java",
|
||||
check.testsInMain);
|
||||
check.missingSuffix) ;
|
||||
exitCode += countAndPrintViolations(
|
||||
"Classes ending with [Tests] are abstract or interfaces",
|
||||
check.notRunnable
|
||||
);
|
||||
exitCode += countAndPrintViolations(
|
||||
"Found inner classes that are tests, which are excluded from the test runner",
|
||||
check.innerClasses
|
||||
);
|
||||
exitCode += countAndPrintViolations(
|
||||
"Pure Unit-Test found must subclass [" + check.testClass.getSimpleName() + "]",
|
||||
check.pureUnitTest
|
||||
);
|
||||
exitCode += countAndPrintViolations(
|
||||
"Classes ending with [Tests] must subclass [" + check.testClass.getSimpleName() + "]",
|
||||
check.notImplementing
|
||||
);
|
||||
exitCode += countAndPrintViolations(
|
||||
"Classes ending with [Tests] or [IT] or extending [" +
|
||||
check.testClass.getSimpleName() + "] must be in src/test/java",
|
||||
check.testsInMain
|
||||
);
|
||||
if (skipIntegTestsInDisguise == false) {
|
||||
assertNoViolations(
|
||||
"Subclasses of " + check.integTestClass.getSimpleName() + " should end with IT as they are integration tests",
|
||||
check.integTestsInDisguise);
|
||||
exitCode += countAndPrintViolations("Subclasses of " + check.integTestClass.getSimpleName() +
|
||||
" should end with IT as they are integration tests",
|
||||
check.integTestsInDisguise
|
||||
);
|
||||
}
|
||||
System.exit(exitCode);
|
||||
}
|
||||
|
||||
private final Set<Class<?>> notImplementing = new HashSet<>();
|
||||
|
@ -138,7 +136,9 @@ public class NamingConventionsCheck {
|
|||
Files.walkFileTree(rootPath, new TestClassVisitor() {
|
||||
@Override
|
||||
protected void visitTestClass(Class<?> clazz) {
|
||||
if (skipTestsInDisguised == false && integTestClass.isAssignableFrom(clazz)) {
|
||||
if (skipTestsInDisguised == false &&
|
||||
integTestClass.isAssignableFrom(clazz) &&
|
||||
clazz != integTestClass) {
|
||||
integTestsInDisguise.add(clazz);
|
||||
}
|
||||
if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
|
||||
|
@ -196,18 +196,15 @@ public class NamingConventionsCheck {
|
|||
|
||||
}
|
||||
|
||||
/**
|
||||
* Fail the process if there are any violations in the set. Named to look like a junit assertion even though it isn't because it is
|
||||
* similar enough.
|
||||
*/
|
||||
private static void assertNoViolations(String message, Set<Class<?>> set) {
|
||||
private static int countAndPrintViolations(String message, Set<Class<?>> set) {
|
||||
if (false == set.isEmpty()) {
|
||||
System.err.println(message + ":");
|
||||
for (Class<?> bad : set) {
|
||||
System.err.println(" * " + bad.getName());
|
||||
}
|
||||
System.exit(1);
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -254,15 +251,16 @@ public class NamingConventionsCheck {
|
|||
* Visit classes named like a test.
|
||||
*/
|
||||
protected abstract void visitTestClass(Class<?> clazz);
|
||||
|
||||
/**
|
||||
* Visit classes named like an integration test.
|
||||
*/
|
||||
protected abstract void visitIntegrationTestClass(Class<?> clazz);
|
||||
|
||||
/**
|
||||
* Visit classes not named like a test at all.
|
||||
*/
|
||||
protected abstract void visitOtherClass(Class<?> clazz);
|
||||
|
||||
@Override
|
||||
public final FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
|
||||
// First we visit the root directory
|
||||
|
@ -310,5 +308,7 @@ public class NamingConventionsCheck {
|
|||
public final FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
|
||||
throw exc;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -88,7 +88,8 @@ java.lang.Thread#getAllStackTraces()
|
|||
|
||||
@defaultMessage Stopping threads explicitly leads to inconsistent states. Use interrupt() instead.
|
||||
java.lang.Thread#stop()
|
||||
java.lang.Thread#stop(java.lang.Throwable)
|
||||
# uncomment when https://github.com/elastic/elasticsearch/issues/31715 is fixed
|
||||
# java.lang.Thread#stop(java.lang.Throwable)
|
||||
|
||||
@defaultMessage Please do not terminate the application
|
||||
java.lang.System#exit(int)
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
1.10
|
|
@ -0,0 +1 @@
|
|||
1.8
|
|
@ -1,6 +1,9 @@
|
|||
package org.elasticsearch.gradle
|
||||
|
||||
class VersionCollectionTest extends GroovyTestCase {
|
||||
import org.elasticsearch.gradle.test.GradleUnitTestCase
|
||||
import org.junit.Test
|
||||
|
||||
class VersionCollectionTests extends GradleUnitTestCase {
|
||||
|
||||
String formatVersion(String version) {
|
||||
return " public static final Version V_${version.replaceAll("\\.", "_")} "
|
||||
|
@ -16,6 +19,7 @@ class VersionCollectionTest extends GroovyTestCase {
|
|||
* branched from Major-1.x At the time of this writing 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior
|
||||
* from 7.0 perspective, or master at the time of this writing.
|
||||
*/
|
||||
@Test
|
||||
void testAgainstMajorUnreleasedWithExistingStagedMinorRelease() {
|
||||
VersionCollection vc = new VersionCollection(allVersions)
|
||||
assertNotNull(vc)
|
||||
|
@ -51,6 +55,7 @@ class VersionCollectionTest extends GroovyTestCase {
|
|||
* unreleased minor is released. At the time of this writing 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test
|
||||
* simulates the behavior from 7.0 perspective, or master at the time of this writing.
|
||||
*/
|
||||
@Test
|
||||
void testAgainstMajorUnreleasedWithoutStagedMinorRelease() {
|
||||
List localVersion = allVersions.clone()
|
||||
localVersion.add(formatVersion('6.2.1')) // release 6.2
|
||||
|
@ -89,6 +94,7 @@ class VersionCollectionTest extends GroovyTestCase {
|
|||
* branched from Major.x At the time of this writing 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior
|
||||
* from 6.3 perspective.
|
||||
*/
|
||||
@Test
|
||||
void testAgainstMinorReleasedBranch() {
|
||||
List localVersion = allVersions.clone()
|
||||
localVersion.removeAll { it.toString().contains('7_0_0')} // remove all the 7.x so that the actual version is 6.3 (6.x)
|
||||
|
@ -126,6 +132,7 @@ class VersionCollectionTest extends GroovyTestCase {
|
|||
* unreleased minor is released. At the time of this writing 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test
|
||||
* simulates the behavior from 6.3 perspective.
|
||||
*/
|
||||
@Test
|
||||
void testAgainstMinorReleasedBranchNoStagedMinor() {
|
||||
List localVersion = allVersions.clone()
|
||||
// remove all the 7.x and add a 6.2.1 which means 6.2 was released
|
||||
|
@ -162,6 +169,7 @@ class VersionCollectionTest extends GroovyTestCase {
|
|||
* This validates the logic of being on a released minor branch. At the time of writing, 6.2 is unreleased, so this is equivalent of being
|
||||
* on 6.1.
|
||||
*/
|
||||
@Test
|
||||
void testAgainstOldMinor() {
|
||||
|
||||
List localVersion = allVersions.clone()
|
||||
|
@ -195,6 +203,7 @@ class VersionCollectionTest extends GroovyTestCase {
|
|||
* This validates the lower bound of wire compat, which is 5.0. It also validates that the span of 2.x to 5.x if it is decided to port
|
||||
* this fix all the way to the maint 5.6 release.
|
||||
*/
|
||||
@Test
|
||||
void testFloorOfWireCompatVersions() {
|
||||
List localVersion = [formatVersion('2.0.0'), formatVersion('2.0.1'), formatVersion('2.1.0'), formatVersion('2.1.1'),
|
||||
formatVersion('5.0.0'), formatVersion('5.0.1'), formatVersion('5.1.0'), formatVersion('5.1.1'),
|
|
@ -19,31 +19,41 @@
|
|||
|
||||
package org.elasticsearch.gradle.doc
|
||||
|
||||
import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.shouldAddShardFailureCheck
|
||||
import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote
|
||||
import org.elasticsearch.gradle.test.GradleUnitTestCase
|
||||
import org.gradle.api.InvalidUserDataException
|
||||
import org.junit.Rule
|
||||
import org.junit.rules.ExpectedException
|
||||
|
||||
import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote
|
||||
import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.shouldAddShardFailureCheck
|
||||
|
||||
class RestTestFromSnippetsTaskTests extends GradleUnitTestCase {
|
||||
|
||||
@Rule
|
||||
public ExpectedException expectedEx = ExpectedException.none()
|
||||
|
||||
class RestTestFromSnippetsTaskTest extends GroovyTestCase {
|
||||
void testInvalidBlockQuote() {
|
||||
String input = "\"foo\": \"\"\"bar\"";
|
||||
String message = shouldFail({ replaceBlockQuote(input) });
|
||||
assertEquals("Invalid block quote starting at 7 in:\n$input", message);
|
||||
String input = "\"foo\": \"\"\"bar\""
|
||||
expectedEx.expect(InvalidUserDataException.class)
|
||||
expectedEx.expectMessage("Invalid block quote starting at 7 in:\n$input")
|
||||
replaceBlockQuote(input)
|
||||
}
|
||||
|
||||
void testSimpleBlockQuote() {
|
||||
assertEquals("\"foo\": \"bort baz\"",
|
||||
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\""));
|
||||
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\""))
|
||||
}
|
||||
|
||||
void testMultipleBlockQuotes() {
|
||||
assertEquals("\"foo\": \"bort baz\", \"bar\": \"other\"",
|
||||
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\""));
|
||||
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\""))
|
||||
}
|
||||
|
||||
void testEscapingInBlockQuote() {
|
||||
assertEquals("\"foo\": \"bort\\\" baz\"",
|
||||
replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\""));
|
||||
replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\""))
|
||||
assertEquals("\"foo\": \"bort\\n baz\"",
|
||||
replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\""));
|
||||
replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\""))
|
||||
}
|
||||
|
||||
void testIsDocWriteRequest() {
|
|
@ -0,0 +1,75 @@
|
|||
package org.elasticsearch.gradle.precommit;
|
||||
|
||||
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
|
||||
import org.gradle.testkit.runner.BuildResult;
|
||||
import org.gradle.testkit.runner.GradleRunner;
|
||||
import org.gradle.testkit.runner.TaskOutcome;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
public class NamingConventionsTaskIT extends GradleIntegrationTestCase {
|
||||
|
||||
public void testPluginCanBeApplied() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("namingConventionsSelfTest"))
|
||||
.withArguments("hello", "-s", "-PcheckForTestsInMain=false")
|
||||
.withPluginClasspath()
|
||||
.build();
|
||||
|
||||
assertEquals(TaskOutcome.SUCCESS, result.task(":hello").getOutcome());
|
||||
String output = result.getOutput();
|
||||
assertTrue(output, output.contains("build plugin can be applied"));
|
||||
}
|
||||
|
||||
public void testNameCheckFailsAsItShould() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("namingConventionsSelfTest"))
|
||||
.withArguments("namingConventions", "-s", "-PcheckForTestsInMain=false")
|
||||
.withPluginClasspath()
|
||||
.buildAndFail();
|
||||
|
||||
assertNotNull("task did not run", result.task(":namingConventions"));
|
||||
assertEquals(TaskOutcome.FAILED, result.task(":namingConventions").getOutcome());
|
||||
String output = result.getOutput();
|
||||
for (String line : Arrays.asList(
|
||||
"Found inner classes that are tests, which are excluded from the test runner:",
|
||||
"* org.elasticsearch.test.NamingConventionsCheckInMainIT$InternalInvalidTests",
|
||||
"Classes ending with [Tests] must subclass [UnitTestCase]:",
|
||||
"* org.elasticsearch.test.NamingConventionsCheckInMainTests",
|
||||
"* org.elasticsearch.test.NamingConventionsCheckInMainIT",
|
||||
"Not all subclasses of UnitTestCase match the naming convention. Concrete classes must end with [Tests]:",
|
||||
"* org.elasticsearch.test.WrongName")) {
|
||||
assertTrue(
|
||||
"expected: '" + line + "' but it was not found in the output:\n" + output,
|
||||
output.contains(line)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public void testNameCheckFailsAsItShouldWithMain() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("namingConventionsSelfTest"))
|
||||
.withArguments("namingConventions", "-s", "-PcheckForTestsInMain=true")
|
||||
.withPluginClasspath()
|
||||
.buildAndFail();
|
||||
|
||||
assertNotNull("task did not run", result.task(":namingConventions"));
|
||||
assertEquals(TaskOutcome.FAILED, result.task(":namingConventions").getOutcome());
|
||||
|
||||
String output = result.getOutput();
|
||||
for (String line : Arrays.asList(
|
||||
"Classes ending with [Tests] or [IT] or extending [UnitTestCase] must be in src/test/java:",
|
||||
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$DummyInterfaceTests",
|
||||
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$DummyAbstractTests",
|
||||
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$InnerTests",
|
||||
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$NotImplementingTests",
|
||||
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$WrongNameTheSecond",
|
||||
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$WrongName")) {
|
||||
assertTrue(
|
||||
"expected: '" + line + "' but it was not found in the output:\n"+output,
|
||||
output.contains(line)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.test;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.JUnit4MethodProvider;
|
||||
import com.carrotsearch.randomizedtesting.RandomizedRunner;
|
||||
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
|
||||
import org.junit.Assert;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(RandomizedRunner.class)
|
||||
@TestMethodProviders({
|
||||
JUnit4MethodProvider.class,
|
||||
JUnit3MethodProvider.class
|
||||
})
|
||||
@ThreadLeakLingering(linger = 5000) // wait for "Connection worker" to die
|
||||
public abstract class BaseTestCase extends Assert {
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
package org.elasticsearch.gradle.test;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
|
||||
|
||||
protected File getProjectDir(String name) {
|
||||
File root = new File("src/testKit/");
|
||||
if (root.exists() == false) {
|
||||
throw new RuntimeException("Could not find resources dir for integration tests. " +
|
||||
"Note that these tests can only be ran by Gradle and are not currently supported by the IDE");
|
||||
}
|
||||
return new File(root, name);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,14 @@
|
|||
package org.elasticsearch.gradle.test;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.JUnit4MethodProvider;
|
||||
import com.carrotsearch.randomizedtesting.RandomizedRunner;
|
||||
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(RandomizedRunner.class)
|
||||
@TestMethodProviders({
|
||||
JUnit4MethodProvider.class,
|
||||
JUnit3MethodProvider.class
|
||||
})
|
||||
public abstract class GradleUnitTestCase extends BaseTestCase {
|
||||
}
|
|
@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.test;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.ClassModel;
|
||||
import com.carrotsearch.randomizedtesting.ClassModel.MethodModel;
|
||||
import com.carrotsearch.randomizedtesting.TestMethodProvider;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Backwards compatible test* method provider (public, non-static).
|
||||
*
|
||||
* copy of org.apache.lucene.util.LuceneJUnit3MethodProvider to avoid a dependency between build and test fw.
|
||||
*/
|
||||
public final class JUnit3MethodProvider implements TestMethodProvider {
|
||||
@Override
|
||||
public Collection<Method> getTestMethods(Class<?> suiteClass, ClassModel classModel) {
|
||||
Map<Method,MethodModel> methods = classModel.getMethods();
|
||||
ArrayList<Method> result = new ArrayList<>();
|
||||
for (MethodModel mm : methods.values()) {
|
||||
// Skip any methods that have overrieds/ shadows.
|
||||
if (mm.getDown() != null) continue;
|
||||
|
||||
Method m = mm.element;
|
||||
if (m.getName().startsWith("test") &&
|
||||
Modifier.isPublic(m.getModifiers()) &&
|
||||
!Modifier.isStatic(m.getModifiers()) &&
|
||||
m.getParameterTypes().length == 0) {
|
||||
result.add(m);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
plugins {
|
||||
id 'java'
|
||||
id 'elasticsearch.build'
|
||||
}
|
||||
|
||||
dependencyLicenses.enabled = false
|
||||
dependenciesInfo.enabled = false
|
||||
forbiddenApisMain.enabled = false
|
||||
forbiddenApisTest.enabled = false
|
||||
jarHell.enabled = false
|
||||
thirdPartyAudit.enabled = false
|
||||
|
||||
ext.licenseFile = file("$buildDir/dummy/license")
|
||||
ext.noticeFile = file("$buildDir/dummy/notice")
|
||||
|
||||
task hello {
|
||||
doFirst {
|
||||
println "build plugin can be applied"
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compile "junit:junit:${versions.junit}"
|
||||
}
|
||||
|
||||
namingConventions {
|
||||
checkForTestsInMain = project.property("checkForTestsInMain") == "true"
|
||||
testClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$UnitTestCase'
|
||||
integTestClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$IntegTestCase'
|
||||
}
|
|
@ -23,4 +23,9 @@ package org.elasticsearch.test;
|
|||
* This class should fail the naming conventions self test.
|
||||
*/
|
||||
public class NamingConventionsCheckInMainIT {
|
||||
|
||||
public static class InternalInvalidTests extends NamingConventionsCheckBadClasses.UnitTestCase {
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.test;
|
||||
|
||||
/**
|
||||
* This class should fail the naming conventions self test.
|
||||
*/
|
||||
public class WrongName extends NamingConventionsCheckBadClasses.UnitTestCase {
|
||||
}
|
|
@ -24,7 +24,7 @@ buildscript {
|
|||
}
|
||||
}
|
||||
dependencies {
|
||||
classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.2'
|
||||
classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -46,9 +46,7 @@ mainClassName = 'org.elasticsearch.client.benchmark.BenchmarkMain'
|
|||
|
||||
|
||||
// never try to invoke tests on the benchmark project - there aren't any
|
||||
check.dependsOn.remove(test)
|
||||
// explicitly override the test task too in case somebody invokes 'gradle test' so it won't trip
|
||||
task test(type: Test, overwrite: true)
|
||||
test.enabled = false
|
||||
|
||||
dependencies {
|
||||
compile 'org.apache.commons:commons-math3:3.2'
|
||||
|
|
|
@ -329,7 +329,7 @@ public class NoopSearchRequestBuilder extends ActionRequestBuilder<SearchRequest
|
|||
*
|
||||
* @see org.elasticsearch.search.sort.SortBuilders
|
||||
*/
|
||||
public NoopSearchRequestBuilder addSort(SortBuilder sort) {
|
||||
public NoopSearchRequestBuilder addSort(SortBuilder<?> sort) {
|
||||
sourceBuilder().sort(sort);
|
||||
return this;
|
||||
}
|
||||
|
@ -415,7 +415,7 @@ public class NoopSearchRequestBuilder extends ActionRequestBuilder<SearchRequest
|
|||
* @param window rescore window
|
||||
* @return this for chaining
|
||||
*/
|
||||
public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer, int window) {
|
||||
public NoopSearchRequestBuilder setRescorer(RescorerBuilder<?> rescorer, int window) {
|
||||
sourceBuilder().clearRescorers();
|
||||
return addRescorer(rescorer.windowSize(window));
|
||||
}
|
||||
|
|
|
@ -41,6 +41,7 @@ dependencies {
|
|||
compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}"
|
||||
compile "org.elasticsearch.plugin:rank-eval-client:${version}"
|
||||
compile "org.elasticsearch.plugin:lang-mustache-client:${version}"
|
||||
compile project(':x-pack:protocol') // TODO bundle into the jar
|
||||
|
||||
testCompile "org.elasticsearch.client:test:${version}"
|
||||
testCompile "org.elasticsearch.test:framework:${version}"
|
||||
|
|
|
@ -22,6 +22,8 @@ package org.elasticsearch.client;
|
|||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
@ -72,6 +74,35 @@ public final class ClusterClient {
|
|||
options, ClusterUpdateSettingsResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the cluster wide settings using the Cluster Get Settings API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-get-settings.html"> Cluster Get Settings
|
||||
* API on elastic.co</a>
|
||||
* @param clusterGetSettingsRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public ClusterGetSettingsResponse getSettings(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options)
|
||||
throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(clusterGetSettingsRequest, RequestConverters::clusterGetSettings,
|
||||
options, ClusterGetSettingsResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously get the cluster wide settings using the Cluster Get Settings API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-get-settings.html"> Cluster Get Settings
|
||||
* API on elastic.co</a>
|
||||
* @param clusterGetSettingsRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void getSettingsAsync(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options,
|
||||
ActionListener<ClusterGetSettingsResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(clusterGetSettingsRequest, RequestConverters::clusterGetSettings,
|
||||
options, ClusterGetSettingsResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get cluster health using the Cluster Health API.
|
||||
* See
|
||||
|
|
|
@ -23,6 +23,8 @@ import org.elasticsearch.action.ActionListener;
|
|||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
|
||||
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
|
||||
|
@ -37,6 +39,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
|
|||
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
|
||||
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
|
||||
|
@ -435,6 +438,34 @@ public final class IndicesClient {
|
|||
GetSettingsResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve information about one or more indexes
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html">
|
||||
* Indices Get Index API on elastic.co</a>
|
||||
* @param getIndexRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public GetIndexResponse get(GetIndexRequest getIndexRequest, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(getIndexRequest, RequestConverters::getIndex, options,
|
||||
GetIndexResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve information about one or more indexes
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html">
|
||||
* Indices Get Index API on elastic.co</a>
|
||||
* @param getIndexRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void getAsync(GetIndexRequest getIndexRequest, RequestOptions options,
|
||||
ActionListener<GetIndexResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(getIndexRequest, RequestConverters::getIndex, options,
|
||||
GetIndexResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Force merge one or more indices using the Force Merge API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-forcemerge.html">
|
||||
|
@ -752,4 +783,32 @@ public final class IndicesClient {
|
|||
restHighLevelClient.performRequestAsyncAndParseEntity(getIndexTemplatesRequest, RequestConverters::getTemplates,
|
||||
options, GetIndexTemplatesResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls the analyze API
|
||||
*
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-analyze.html">Analyze API on elastic.co</a>
|
||||
*
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
*/
|
||||
public AnalyzeResponse analyze(AnalyzeRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::analyze, options,
|
||||
AnalyzeResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously calls the analyze API
|
||||
*
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-analyze.html">Analyze API on elastic.co</a>
|
||||
*
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void analyzeAsync(AnalyzeRequest request, RequestOptions options,
|
||||
ActionListener<AnalyzeResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::analyze, options,
|
||||
AnalyzeResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,12 +36,16 @@ import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteReposito
|
|||
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
|
||||
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
|
@ -65,14 +69,15 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateReque
|
|||
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.explain.ExplainRequest;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.get.MultiGetRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.ingest.DeletePipelineRequest;
|
||||
import org.elasticsearch.action.ingest.PutPipelineRequest;
|
||||
import org.elasticsearch.action.ingest.GetPipelineRequest;
|
||||
import org.elasticsearch.action.ingest.SimulatePipelineRequest;
|
||||
import org.elasticsearch.action.ingest.PutPipelineRequest;
|
||||
import org.elasticsearch.action.search.ClearScrollRequest;
|
||||
import org.elasticsearch.action.search.MultiSearchRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
|
@ -99,7 +104,9 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.rankeval.RankEvalRequest;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
|
||||
import org.elasticsearch.rest.action.search.RestSearchAction;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
|
||||
import org.elasticsearch.script.mustache.SearchTemplateRequest;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.tasks.TaskId;
|
||||
|
@ -109,8 +116,10 @@ import java.io.IOException;
|
|||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.EnumSet;
|
||||
import java.util.Locale;
|
||||
import java.util.StringJoiner;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
final class RequestConverters {
|
||||
static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON;
|
||||
|
@ -602,6 +611,21 @@ final class RequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest) throws IOException {
|
||||
Request request = new Request(HttpPost.METHOD_NAME, "/_msearch/template");
|
||||
|
||||
Params params = new Params(request);
|
||||
params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
|
||||
if (multiSearchTemplateRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) {
|
||||
params.putParam("max_concurrent_searches", Integer.toString(multiSearchTemplateRequest.maxConcurrentSearchRequests()));
|
||||
}
|
||||
|
||||
XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent();
|
||||
byte[] source = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, xContent);
|
||||
request.setEntity(new ByteArrayEntity(source, createContentType(xContent.type())));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request existsAlias(GetAliasesRequest getAliasesRequest) {
|
||||
if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) &&
|
||||
(getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) {
|
||||
|
@ -618,6 +642,19 @@ final class RequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request explain(ExplainRequest explainRequest) throws IOException {
|
||||
Request request = new Request(HttpGet.METHOD_NAME,
|
||||
endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain"));
|
||||
|
||||
Params params = new Params(request);
|
||||
params.withStoredFields(explainRequest.storedFields());
|
||||
params.withFetchSourceContext(explainRequest.fetchSourceContext());
|
||||
params.withRouting(explainRequest.routing());
|
||||
params.withPreference(explainRequest.preference());
|
||||
request.setEntity(createEntity(explainRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest) {
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint(fieldCapabilitiesRequest.indices(), "_field_caps"));
|
||||
|
||||
|
@ -677,6 +714,17 @@ final class RequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request clusterGetSettings(ClusterGetSettingsRequest clusterGetSettingsRequest) throws IOException {
|
||||
Request request = new Request(HttpGet.METHOD_NAME, "/_cluster/settings");
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withLocal(clusterGetSettingsRequest.local());
|
||||
parameters.withIncludeDefaults(clusterGetSettingsRequest.includeDefaults());
|
||||
parameters.withMasterTimeout(clusterGetSettingsRequest.masterNodeTimeout());
|
||||
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getPipeline(GetPipelineRequest getPipelineRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_ingest/pipeline")
|
||||
|
@ -789,6 +837,22 @@ final class RequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request getIndex(GetIndexRequest getIndexRequest) {
|
||||
String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices();
|
||||
|
||||
String endpoint = endpoint(indices);
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
||||
Params params = new Params(request);
|
||||
params.withIndicesOptions(getIndexRequest.indicesOptions());
|
||||
params.withLocal(getIndexRequest.local());
|
||||
params.withIncludeDefaults(getIndexRequest.includeDefaults());
|
||||
params.withHuman(getIndexRequest.humanReadable());
|
||||
params.withMasterTimeout(getIndexRequest.masterNodeTimeout());
|
||||
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request indicesExist(GetIndexRequest getIndexRequest) {
|
||||
// this can be called with no indices as argument by transport client, not via REST though
|
||||
if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) {
|
||||
|
@ -866,6 +930,39 @@ final class RequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder().addPathPart("_snapshot")
|
||||
.addPathPart(createSnapshotRequest.repository())
|
||||
.addPathPart(createSnapshotRequest.snapshot())
|
||||
.build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request);
|
||||
params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout());
|
||||
params.withWaitForCompletion(createSnapshotRequest.waitForCompletion());
|
||||
request.setEntity(createEntity(createSnapshotRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) {
|
||||
EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPartAsIs("_snapshot")
|
||||
.addPathPart(getSnapshotsRequest.repository());
|
||||
String endpoint;
|
||||
if (getSnapshotsRequest.snapshots().length == 0) {
|
||||
endpoint = endpointBuilder.addPathPart("_all").build();
|
||||
} else {
|
||||
endpoint = endpointBuilder.addCommaSeparatedPathParts(getSnapshotsRequest.snapshots()).build();
|
||||
}
|
||||
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout());
|
||||
parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable()));
|
||||
parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose()));
|
||||
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
|
||||
.addPathPart(deleteSnapshotRequest.repository())
|
||||
|
@ -942,6 +1039,18 @@ final class RequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request analyze(AnalyzeRequest request) throws IOException {
|
||||
EndpointBuilder builder = new EndpointBuilder();
|
||||
String index = request.index();
|
||||
if (index != null) {
|
||||
builder.addPathPart(index);
|
||||
}
|
||||
builder.addPathPartAsIs("_analyze");
|
||||
Request req = new Request(HttpGet.METHOD_NAME, builder.build());
|
||||
req.setEntity(createEntity(request, REQUEST_BODY_CONTENT_TYPE));
|
||||
return req;
|
||||
}
|
||||
|
||||
static Request getScript(GetStoredScriptRequest getStoredScriptRequest) {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
@ -959,6 +1068,19 @@ final class RequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request xPackInfo(XPackInfoRequest infoRequest) {
|
||||
Request request = new Request(HttpGet.METHOD_NAME, "/_xpack");
|
||||
if (false == infoRequest.isVerbose()) {
|
||||
request.addParameter("human", "false");
|
||||
}
|
||||
if (false == infoRequest.getCategories().equals(EnumSet.allOf(XPackInfoRequest.Category.class))) {
|
||||
request.addParameter("categories", infoRequest.getCategories().stream()
|
||||
.map(c -> c.toString().toLowerCase(Locale.ROOT))
|
||||
.collect(Collectors.joining(",")));
|
||||
}
|
||||
return request;
|
||||
}
|
||||
|
||||
private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
|
||||
BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
|
||||
return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
|
||||
|
|
|
@ -34,6 +34,8 @@ import org.elasticsearch.action.bulk.BulkRequest;
|
|||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.delete.DeleteResponse;
|
||||
import org.elasticsearch.action.explain.ExplainRequest;
|
||||
import org.elasticsearch.action.explain.ExplainResponse;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
|
@ -64,8 +66,12 @@ import org.elasticsearch.common.xcontent.XContentType;
|
|||
import org.elasticsearch.index.rankeval.RankEvalRequest;
|
||||
import org.elasticsearch.index.rankeval.RankEvalResponse;
|
||||
import org.elasticsearch.plugins.spi.NamedXContentProvider;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoResponse;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse;
|
||||
import org.elasticsearch.script.mustache.SearchTemplateRequest;
|
||||
import org.elasticsearch.script.mustache.SearchTemplateResponse;
|
||||
import org.elasticsearch.search.aggregations.Aggregation;
|
||||
|
@ -614,6 +620,42 @@ public class RestHighLevelClient implements Closeable {
|
|||
SearchTemplateResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes a request using the Explain API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-explain.html">Explain API on elastic.co</a>
|
||||
* @param explainRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public final ExplainResponse explain(ExplainRequest explainRequest, RequestOptions options) throws IOException {
|
||||
return performRequest(explainRequest, RequestConverters::explain, options,
|
||||
response -> {
|
||||
CheckedFunction<XContentParser, ExplainResponse, IOException> entityParser =
|
||||
parser -> ExplainResponse.fromXContent(parser, convertExistsResponse(response));
|
||||
return parseEntity(response.getEntity(), entityParser);
|
||||
},
|
||||
singleton(404));
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously executes a request using the Explain API.
|
||||
*
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-explain.html">Explain API on elastic.co</a>
|
||||
* @param explainRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public final void explainAsync(ExplainRequest explainRequest, RequestOptions options, ActionListener<ExplainResponse> listener) {
|
||||
performRequestAsync(explainRequest, RequestConverters::explain, options,
|
||||
response -> {
|
||||
CheckedFunction<XContentParser, ExplainResponse, IOException> entityParser =
|
||||
parser -> ExplainResponse.fromXContent(parser, convertExistsResponse(response));
|
||||
return parseEntity(response.getEntity(), entityParser);
|
||||
},
|
||||
listener, singleton(404));
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes a request using the Ranking Evaluation API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-rank-eval.html">Ranking Evaluation API
|
||||
|
@ -628,6 +670,32 @@ public class RestHighLevelClient implements Closeable {
|
|||
emptySet());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Executes a request using the Multi Search Template API.
|
||||
*
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/multi-search-template.html">Multi Search Template API
|
||||
* on elastic.co</a>.
|
||||
*/
|
||||
public final MultiSearchTemplateResponse multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest,
|
||||
RequestOptions options) throws IOException {
|
||||
return performRequestAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate,
|
||||
options, MultiSearchTemplateResponse::fromXContext, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously executes a request using the Multi Search Template API
|
||||
*
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/multi-search-template.html">Multi Search Template API
|
||||
* on elastic.co</a>.
|
||||
*/
|
||||
public final void multiSearchTemplateAsync(MultiSearchTemplateRequest multiSearchTemplateRequest,
|
||||
RequestOptions options,
|
||||
ActionListener<MultiSearchTemplateResponse> listener) {
|
||||
performRequestAsyncAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate,
|
||||
options, MultiSearchTemplateResponse::fromXContext, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously executes a request using the Ranking Evaluation API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-rank-eval.html">Ranking Evaluation API
|
||||
|
@ -726,6 +794,34 @@ public class RestHighLevelClient implements Closeable {
|
|||
FieldCapabilitiesResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch information about X-Pack from the cluster if it is installed.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/info-api.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public XPackInfoResponse xPackInfo(XPackInfoRequest request, RequestOptions options) throws IOException {
|
||||
return performRequestAndParseEntity(request, RequestConverters::xPackInfo, options,
|
||||
XPackInfoResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch information about X-Pack from the cluster if it is installed.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/info-api.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void xPackInfoAsync(XPackInfoRequest request, RequestOptions options,
|
||||
ActionListener<XPackInfoResponse> listener) {
|
||||
performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options,
|
||||
XPackInfoResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
protected final <Req extends ActionRequest, Resp> Resp performRequestAndParseEntity(Req request,
|
||||
CheckedFunction<Req, Request, IOException> requestConverter,
|
||||
RequestOptions options,
|
||||
|
|
|
@ -28,8 +28,12 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ
|
|||
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -164,6 +168,59 @@ public final class SnapshotClient {
|
|||
VerifyRepositoryResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a snapshot.
|
||||
* <p>
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
|
||||
* API on elastic.co</a>
|
||||
*/
|
||||
public CreateSnapshotResponse createSnapshot(CreateSnapshotRequest createSnapshotRequest, RequestOptions options)
|
||||
throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
|
||||
CreateSnapshotResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously creates a snapshot.
|
||||
* <p>
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
|
||||
* API on elastic.co</a>
|
||||
*/
|
||||
public void createSnapshotAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options,
|
||||
ActionListener<CreateSnapshotResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
|
||||
CreateSnapshotResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get snapshots.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
|
||||
* API on elastic.co</a>
|
||||
*
|
||||
* @param getSnapshotsRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public GetSnapshotsResponse get(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options,
|
||||
GetSnapshotsResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously get snapshots.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
|
||||
* API on elastic.co</a>
|
||||
*
|
||||
* @param getSnapshotsRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void getAsync(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options, ActionListener<GetSnapshotsResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options,
|
||||
GetSnapshotsResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a snapshot.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
|
||||
|
|
|
@ -22,6 +22,8 @@ package org.elasticsearch.client;
|
|||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
|
@ -42,6 +44,7 @@ import java.util.Map;
|
|||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
|
@ -112,6 +115,46 @@ public class ClusterClientIT extends ESRestHighLevelClientTestCase {
|
|||
"Elasticsearch exception [type=illegal_argument_exception, reason=transient setting [" + setting + "], not recognized]"));
|
||||
}
|
||||
|
||||
public void testClusterGetSettings() throws IOException {
|
||||
final String transientSettingKey = RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey();
|
||||
final int transientSettingValue = 10;
|
||||
|
||||
final String persistentSettingKey = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey();
|
||||
final String persistentSettingValue = EnableAllocationDecider.Allocation.NONE.name();
|
||||
|
||||
Settings transientSettings =
|
||||
Settings.builder().put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES).build();
|
||||
Settings persistentSettings = Settings.builder().put(persistentSettingKey, persistentSettingValue).build();
|
||||
clusterUpdateSettings(persistentSettings, transientSettings);
|
||||
|
||||
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
|
||||
ClusterGetSettingsResponse response = execute(
|
||||
request, highLevelClient().cluster()::getSettings, highLevelClient().cluster()::getSettingsAsync);
|
||||
assertEquals(persistentSettings, response.getPersistentSettings());
|
||||
assertEquals(transientSettings, response.getTransientSettings());
|
||||
assertEquals(0, response.getDefaultSettings().size());
|
||||
}
|
||||
|
||||
public void testClusterGetSettingsWithDefault() throws IOException {
|
||||
final String transientSettingKey = RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey();
|
||||
final int transientSettingValue = 10;
|
||||
|
||||
final String persistentSettingKey = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey();
|
||||
final String persistentSettingValue = EnableAllocationDecider.Allocation.NONE.name();
|
||||
|
||||
Settings transientSettings =
|
||||
Settings.builder().put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES).build();
|
||||
Settings persistentSettings = Settings.builder().put(persistentSettingKey, persistentSettingValue).build();
|
||||
clusterUpdateSettings(persistentSettings, transientSettings);
|
||||
|
||||
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest().includeDefaults(true);
|
||||
ClusterGetSettingsResponse response = execute(
|
||||
request, highLevelClient().cluster()::getSettings, highLevelClient().cluster()::getSettingsAsync);
|
||||
assertEquals(persistentSettings, response.getPersistentSettings());
|
||||
assertEquals(transientSettings, response.getTransientSettings());
|
||||
assertThat(response.getDefaultSettings().size(), greaterThan(0));
|
||||
}
|
||||
|
||||
public void testClusterHealthGreen() throws IOException {
|
||||
ClusterHealthRequest request = new ClusterHealthRequest();
|
||||
request.timeout("5s");
|
||||
|
|
|
@ -20,9 +20,11 @@
|
|||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
|
||||
import org.elasticsearch.action.ingest.PutPipelineRequest;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.ingest.Pipeline;
|
||||
|
@ -126,4 +128,12 @@ public abstract class ESRestHighLevelClientTestCase extends ESRestTestCase {
|
|||
protected static void createPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
|
||||
assertOK(client().performRequest(RequestConverters.putPipeline(putPipelineRequest)));
|
||||
}
|
||||
|
||||
protected static void clusterUpdateSettings(Settings persistentSettings,
|
||||
Settings transientSettings) throws IOException {
|
||||
ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
|
||||
request.persistentSettings(persistentSettings);
|
||||
request.transientSettings(transientSettings);
|
||||
assertOK(client().performRequest(RequestConverters.clusterPutSettings(request)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,7 +59,7 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
|
|||
return map;
|
||||
}
|
||||
|
||||
private static AliasMetaData createAliasMetaData() {
|
||||
public static AliasMetaData createAliasMetaData() {
|
||||
AliasMetaData.Builder builder = AliasMetaData.builder(randomAlphaOfLengthBetween(3, 10));
|
||||
if (randomBoolean()) {
|
||||
builder.routing(randomAlphaOfLengthBetween(3, 10));
|
||||
|
|
|
@ -29,6 +29,8 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
|||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
|
||||
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
|
||||
|
@ -43,6 +45,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
|
|||
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
|
||||
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
|
||||
|
@ -97,6 +100,7 @@ import java.util.HashMap;
|
|||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractRawValues;
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue;
|
||||
import static org.hamcrest.CoreMatchers.hasItem;
|
||||
|
@ -110,6 +114,7 @@ import static org.hamcrest.Matchers.not;
|
|||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
import static org.hamcrest.Matchers.startsWith;
|
||||
import static org.hamcrest.core.IsInstanceOf.instanceOf;
|
||||
|
||||
public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
||||
|
||||
|
@ -324,6 +329,75 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
assertEquals(1, getSettingsResponse.getIndexToDefaultSettings().get("get_settings_index").size());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testGetIndex() throws IOException {
|
||||
String indexName = "get_index_test";
|
||||
Settings basicSettings = Settings.builder()
|
||||
.put(SETTING_NUMBER_OF_SHARDS, 1)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.build();
|
||||
String mappings = "\"type-1\":{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}";
|
||||
createIndex(indexName, basicSettings, mappings);
|
||||
|
||||
GetIndexRequest getIndexRequest = new GetIndexRequest()
|
||||
.indices(indexName).includeDefaults(false);
|
||||
GetIndexResponse getIndexResponse =
|
||||
execute(getIndexRequest, highLevelClient().indices()::get, highLevelClient().indices()::getAsync);
|
||||
|
||||
// default settings should be null
|
||||
assertNull(getIndexResponse.getSetting(indexName, "index.refresh_interval"));
|
||||
assertEquals("1", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_SHARDS));
|
||||
assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS));
|
||||
assertNotNull(getIndexResponse.getMappings().get(indexName));
|
||||
assertNotNull(getIndexResponse.getMappings().get(indexName).get("type-1"));
|
||||
Object o = getIndexResponse.getMappings().get(indexName).get("type-1").getSourceAsMap().get("properties");
|
||||
assertThat(o, instanceOf(Map.class));
|
||||
//noinspection unchecked
|
||||
assertThat(((Map<String, Object>) o).get("field-1"), instanceOf(Map.class));
|
||||
//noinspection unchecked
|
||||
Map<String, Object> fieldMapping = (Map<String, Object>) ((Map<String, Object>) o).get("field-1");
|
||||
assertEquals("integer", fieldMapping.get("type"));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testGetIndexWithDefaults() throws IOException {
|
||||
String indexName = "get_index_test";
|
||||
Settings basicSettings = Settings.builder()
|
||||
.put(SETTING_NUMBER_OF_SHARDS, 1)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.build();
|
||||
String mappings = "\"type-1\":{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}";
|
||||
createIndex(indexName, basicSettings, mappings);
|
||||
|
||||
GetIndexRequest getIndexRequest = new GetIndexRequest()
|
||||
.indices(indexName).includeDefaults(true);
|
||||
GetIndexResponse getIndexResponse =
|
||||
execute(getIndexRequest, highLevelClient().indices()::get, highLevelClient().indices()::getAsync);
|
||||
|
||||
assertNotNull(getIndexResponse.getSetting(indexName, "index.refresh_interval"));
|
||||
assertEquals(IndexSettings.DEFAULT_REFRESH_INTERVAL,
|
||||
getIndexResponse.defaultSettings().get(indexName).getAsTime("index.refresh_interval", null));
|
||||
assertEquals("1", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_SHARDS));
|
||||
assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS));
|
||||
assertNotNull(getIndexResponse.getMappings().get(indexName));
|
||||
assertNotNull(getIndexResponse.getMappings().get(indexName).get("type-1"));
|
||||
Object o = getIndexResponse.getMappings().get(indexName).get("type-1").getSourceAsMap().get("properties");
|
||||
assertThat(o, instanceOf(Map.class));
|
||||
assertThat(((Map<String, Object>) o).get("field-1"), instanceOf(Map.class));
|
||||
Map<String, Object> fieldMapping = (Map<String, Object>) ((Map<String, Object>) o).get("field-1");
|
||||
assertEquals("integer", fieldMapping.get("type"));
|
||||
}
|
||||
|
||||
public void testGetIndexNonExistentIndex() throws IOException {
|
||||
String nonExistentIndex = "index_that_doesnt_exist";
|
||||
assertFalse(indexExists(nonExistentIndex));
|
||||
|
||||
GetIndexRequest getIndexRequest = new GetIndexRequest().indices(nonExistentIndex);
|
||||
ElasticsearchException exception = expectThrows(ElasticsearchException.class,
|
||||
() -> execute(getIndexRequest, highLevelClient().indices()::get, highLevelClient().indices()::getAsync));
|
||||
assertEquals(RestStatus.NOT_FOUND, exception.status());
|
||||
}
|
||||
|
||||
public void testPutMapping() throws IOException {
|
||||
// Add mappings to index
|
||||
String indexName = "mapping_index";
|
||||
|
@ -1278,4 +1352,20 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
new GetIndexTemplatesRequest().names("the-template-*"), client.indices()::getTemplate, client.indices()::getTemplateAsync));
|
||||
assertThat(notFound.status(), equalTo(RestStatus.NOT_FOUND));
|
||||
}
|
||||
|
||||
public void testAnalyze() throws Exception {
|
||||
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
AnalyzeRequest noindexRequest = new AnalyzeRequest().text("One two three").analyzer("english");
|
||||
AnalyzeResponse noindexResponse = execute(noindexRequest, client.indices()::analyze, client.indices()::analyzeAsync);
|
||||
|
||||
assertThat(noindexResponse.getTokens(), hasSize(3));
|
||||
|
||||
AnalyzeRequest detailsRequest = new AnalyzeRequest().text("One two three").analyzer("english").explain(true);
|
||||
AnalyzeResponse detailsResponse = execute(detailsRequest, client.indices()::analyze, client.indices()::analyzeAsync);
|
||||
|
||||
assertNotNull(detailsResponse.detail());
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,8 +21,13 @@ package org.elasticsearch.client;
|
|||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.elasticsearch.action.main.MainResponse;
|
||||
import org.elasticsearch.protocol.license.LicenseStatus;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoResponse;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.EnumSet;
|
||||
import java.util.Map;
|
||||
|
||||
public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
|
||||
|
@ -31,7 +36,6 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
|
|||
assertTrue(highLevelClient().ping(RequestOptions.DEFAULT));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testInfo() throws IOException {
|
||||
MainResponse info = highLevelClient().info(RequestOptions.DEFAULT);
|
||||
// compare with what the low level client outputs
|
||||
|
@ -41,6 +45,7 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
// only check node name existence, might be a different one from what was hit by low level client in multi-node cluster
|
||||
assertNotNull(info.getNodeName());
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<String, Object> versionMap = (Map<String, Object>) infoAsMap.get("version");
|
||||
assertEquals(versionMap.get("build_flavor"), info.getBuild().flavor().displayName());
|
||||
assertEquals(versionMap.get("build_type"), info.getBuild().type().displayName());
|
||||
|
@ -51,4 +56,49 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
|
|||
assertEquals(versionMap.get("lucene_version"), info.getVersion().luceneVersion.toString());
|
||||
}
|
||||
|
||||
public void testXPackInfo() throws IOException {
|
||||
XPackInfoRequest request = new XPackInfoRequest();
|
||||
request.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class));
|
||||
request.setVerbose(true);
|
||||
XPackInfoResponse info = highLevelClient().xPackInfo(request, RequestOptions.DEFAULT);
|
||||
|
||||
MainResponse mainResponse = highLevelClient().info(RequestOptions.DEFAULT);
|
||||
|
||||
assertEquals(mainResponse.getBuild().shortHash(), info.getBuildInfo().getHash());
|
||||
|
||||
assertEquals("basic", info.getLicenseInfo().getType());
|
||||
assertEquals("basic", info.getLicenseInfo().getMode());
|
||||
assertEquals(LicenseStatus.ACTIVE, info.getLicenseInfo().getStatus());
|
||||
|
||||
FeatureSet graph = info.getFeatureSetsInfo().getFeatureSets().get("graph");
|
||||
assertNotNull(graph.description());
|
||||
assertFalse(graph.available());
|
||||
assertTrue(graph.enabled());
|
||||
assertNull(graph.nativeCodeInfo());
|
||||
FeatureSet monitoring = info.getFeatureSetsInfo().getFeatureSets().get("monitoring");
|
||||
assertNotNull(monitoring.description());
|
||||
assertTrue(monitoring.available());
|
||||
assertTrue(monitoring.enabled());
|
||||
assertNull(monitoring.nativeCodeInfo());
|
||||
FeatureSet ml = info.getFeatureSetsInfo().getFeatureSets().get("ml");
|
||||
assertNotNull(ml.description());
|
||||
assertFalse(ml.available());
|
||||
assertTrue(ml.enabled());
|
||||
assertEquals(mainResponse.getVersion().toString(),
|
||||
ml.nativeCodeInfo().get("version").toString().replace("-SNAPSHOT", ""));
|
||||
}
|
||||
|
||||
public void testXPackInfoEmptyRequest() throws IOException {
|
||||
XPackInfoResponse info = highLevelClient().xPackInfo(new XPackInfoRequest(), RequestOptions.DEFAULT);
|
||||
|
||||
/*
|
||||
* The default in the transport client is non-verbose and returning
|
||||
* no categories which is the opposite of the default when you use
|
||||
* the API over REST. We don't want to break the transport client
|
||||
* even though it doesn't feel like a good default.
|
||||
*/
|
||||
assertNull(info.getBuildInfo());
|
||||
assertNull(info.getLicenseInfo());
|
||||
assertNull(info.getFeatureSetsInfo());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,14 +36,18 @@ import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteReposito
|
|||
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.Alias;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
|
||||
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
|
@ -68,6 +72,7 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryReques
|
|||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkShardRequest;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.explain.ExplainRequest;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.get.MultiGetRequest;
|
||||
|
@ -111,15 +116,18 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.RandomCreateIndexGenerator;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.TermQueryBuilder;
|
||||
import org.elasticsearch.index.rankeval.PrecisionAtK;
|
||||
import org.elasticsearch.index.rankeval.RankEvalRequest;
|
||||
import org.elasticsearch.index.rankeval.RankEvalSpec;
|
||||
import org.elasticsearch.index.rankeval.RatedRequest;
|
||||
import org.elasticsearch.index.rankeval.RestRankEvalAction;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
|
||||
import org.elasticsearch.repositories.fs.FsRepository;
|
||||
import org.elasticsearch.rest.action.search.RestSearchAction;
|
||||
import org.elasticsearch.script.ScriptType;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
|
||||
import org.elasticsearch.script.mustache.SearchTemplateRequest;
|
||||
import org.elasticsearch.search.Scroll;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||
|
@ -143,6 +151,7 @@ import java.nio.file.Path;
|
|||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
@ -577,6 +586,39 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
assertThat(request.getEntity(), nullValue());
|
||||
}
|
||||
|
||||
public void testGetIndex() throws IOException {
|
||||
String[] indicesUnderTest = randomBoolean() ? null : randomIndicesNames(0, 5);
|
||||
|
||||
GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indicesUnderTest);
|
||||
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
setRandomMasterTimeout(getIndexRequest, expectedParams);
|
||||
setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams);
|
||||
setRandomLocal(getIndexRequest, expectedParams);
|
||||
setRandomHumanReadable(getIndexRequest, expectedParams);
|
||||
|
||||
if (randomBoolean()) {
|
||||
// the request object will not have include_defaults present unless it is set to
|
||||
// true
|
||||
getIndexRequest.includeDefaults(randomBoolean());
|
||||
if (getIndexRequest.includeDefaults()) {
|
||||
expectedParams.put("include_defaults", Boolean.toString(true));
|
||||
}
|
||||
}
|
||||
|
||||
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||
if (indicesUnderTest != null && indicesUnderTest.length > 0) {
|
||||
endpoint.add(String.join(",", indicesUnderTest));
|
||||
}
|
||||
|
||||
Request request = RequestConverters.getIndex(getIndexRequest);
|
||||
|
||||
assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
|
||||
assertThat(request.getParameters(), equalTo(expectedParams));
|
||||
assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
|
||||
assertThat(request.getEntity(), nullValue());
|
||||
}
|
||||
|
||||
public void testDeleteIndexEmptyIndices() {
|
||||
String[] indices = randomBoolean() ? null : Strings.EMPTY_ARRAY;
|
||||
ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate();
|
||||
|
@ -1370,6 +1412,52 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
assertToXContentBody(searchTemplateRequest, request.getEntity());
|
||||
}
|
||||
|
||||
public void testMultiSearchTemplate() throws Exception {
|
||||
final int numSearchRequests = randomIntBetween(1, 10);
|
||||
MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
|
||||
|
||||
for (int i = 0; i < numSearchRequests; i++) {
|
||||
// Create a random request.
|
||||
String[] indices = randomIndicesNames(0, 5);
|
||||
SearchRequest searchRequest = new SearchRequest(indices);
|
||||
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
setRandomSearchParams(searchRequest, expectedParams);
|
||||
|
||||
// scroll is not supported in the current msearch or msearchtemplate api, so unset it:
|
||||
searchRequest.scroll((Scroll) null);
|
||||
// batched reduce size is currently not set-able on a per-request basis as it is a query string parameter only
|
||||
searchRequest.setBatchedReduceSize(SearchRequest.DEFAULT_BATCHED_REDUCE_SIZE);
|
||||
|
||||
setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams);
|
||||
|
||||
SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(searchRequest);
|
||||
|
||||
searchTemplateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}");
|
||||
searchTemplateRequest.setScriptType(ScriptType.INLINE);
|
||||
searchTemplateRequest.setProfile(randomBoolean());
|
||||
|
||||
Map<String, Object> scriptParams = new HashMap<>();
|
||||
scriptParams.put("field", "name");
|
||||
scriptParams.put("value", randomAlphaOfLengthBetween(2, 5));
|
||||
searchTemplateRequest.setScriptParams(scriptParams);
|
||||
|
||||
multiSearchTemplateRequest.add(searchTemplateRequest);
|
||||
}
|
||||
|
||||
Request multiRequest = RequestConverters.multiSearchTemplate(multiSearchTemplateRequest);
|
||||
|
||||
assertEquals(HttpPost.METHOD_NAME, multiRequest.getMethod());
|
||||
assertEquals("/_msearch/template", multiRequest.getEndpoint());
|
||||
List<SearchTemplateRequest> searchRequests = multiSearchTemplateRequest.requests();
|
||||
assertEquals(numSearchRequests, searchRequests.size());
|
||||
|
||||
HttpEntity actualEntity = multiRequest.getEntity();
|
||||
byte[] expectedBytes = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, XContentType.JSON.xContent());
|
||||
assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue());
|
||||
assertEquals(new BytesArray(expectedBytes), new BytesArray(EntityUtils.toByteArray(actualEntity)));
|
||||
}
|
||||
|
||||
public void testExistsAlias() {
|
||||
GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
|
||||
String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
|
||||
|
@ -1418,6 +1506,49 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testExplain() throws IOException {
|
||||
String index = randomAlphaOfLengthBetween(3, 10);
|
||||
String type = randomAlphaOfLengthBetween(3, 10);
|
||||
String id = randomAlphaOfLengthBetween(3, 10);
|
||||
|
||||
ExplainRequest explainRequest = new ExplainRequest(index, type, id);
|
||||
explainRequest.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10)));
|
||||
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
|
||||
if (randomBoolean()) {
|
||||
String routing = randomAlphaOfLengthBetween(3, 10);
|
||||
explainRequest.routing(routing);
|
||||
expectedParams.put("routing", routing);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
String preference = randomAlphaOfLengthBetween(3, 10);
|
||||
explainRequest.preference(preference);
|
||||
expectedParams.put("preference", preference);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
String[] storedFields = generateRandomStringArray(10, 5, false, false);
|
||||
String storedFieldsParams = randomFields(storedFields);
|
||||
explainRequest.storedFields(storedFields);
|
||||
expectedParams.put("stored_fields", storedFieldsParams);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
randomizeFetchSourceContextParams(explainRequest::fetchSourceContext, expectedParams);
|
||||
}
|
||||
|
||||
Request request = RequestConverters.explain(explainRequest);
|
||||
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||
endpoint.add(index)
|
||||
.add(type)
|
||||
.add(id)
|
||||
.add("_explain");
|
||||
|
||||
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
||||
assertEquals(endpoint.toString(), request.getEndpoint());
|
||||
assertEquals(expectedParams, request.getParameters());
|
||||
assertToXContentBody(explainRequest, request.getEntity());
|
||||
}
|
||||
|
||||
public void testFieldCaps() {
|
||||
// Create a random request.
|
||||
String[] indices = randomIndicesNames(0, 5);
|
||||
|
@ -1542,6 +1673,21 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
assertEquals(expectedParams, expectedRequest.getParameters());
|
||||
}
|
||||
|
||||
public void testClusterGetSettings() throws IOException {
|
||||
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
setRandomMasterTimeout(request, expectedParams);
|
||||
request.includeDefaults(randomBoolean());
|
||||
if (request.includeDefaults()) {
|
||||
expectedParams.put("include_defaults", String.valueOf(true));
|
||||
}
|
||||
|
||||
Request expectedRequest = RequestConverters.clusterGetSettings(request);
|
||||
assertEquals("/_cluster/settings", expectedRequest.getEndpoint());
|
||||
assertEquals(HttpGet.METHOD_NAME, expectedRequest.getMethod());
|
||||
assertEquals(expectedParams, expectedRequest.getParameters());
|
||||
}
|
||||
|
||||
public void testPutPipeline() throws IOException {
|
||||
String pipelineId = "some_pipeline_id";
|
||||
PutPipelineRequest request = new PutPipelineRequest(
|
||||
|
@ -1943,6 +2089,88 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
assertThat(expectedParams, equalTo(request.getParameters()));
|
||||
}
|
||||
|
||||
public void testCreateSnapshot() throws IOException {
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
String repository = randomIndicesNames(1, 1)[0];
|
||||
String snapshot = "snapshot-" + generateRandomStringArray(1, randomInt(10), false, false)[0];
|
||||
String endpoint = "/_snapshot/" + repository + "/" + snapshot;
|
||||
|
||||
CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot);
|
||||
setRandomMasterTimeout(createSnapshotRequest, expectedParams);
|
||||
Boolean waitForCompletion = randomBoolean();
|
||||
createSnapshotRequest.waitForCompletion(waitForCompletion);
|
||||
|
||||
if (waitForCompletion) {
|
||||
expectedParams.put("wait_for_completion", waitForCompletion.toString());
|
||||
}
|
||||
|
||||
Request request = RequestConverters.createSnapshot(createSnapshotRequest);
|
||||
assertThat(endpoint, equalTo(request.getEndpoint()));
|
||||
assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(expectedParams, equalTo(request.getParameters()));
|
||||
assertToXContentBody(createSnapshotRequest, request.getEntity());
|
||||
}
|
||||
|
||||
public void testGetSnapshots() {
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
String repository = randomIndicesNames(1, 1)[0];
|
||||
String snapshot1 = "snapshot1-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);
|
||||
String snapshot2 = "snapshot2-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);
|
||||
|
||||
String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s,%s", repository, snapshot1, snapshot2);
|
||||
|
||||
GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest();
|
||||
getSnapshotsRequest.repository(repository);
|
||||
getSnapshotsRequest.snapshots(Arrays.asList(snapshot1, snapshot2).toArray(new String[0]));
|
||||
setRandomMasterTimeout(getSnapshotsRequest, expectedParams);
|
||||
|
||||
if (randomBoolean()) {
|
||||
boolean ignoreUnavailable = randomBoolean();
|
||||
getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
|
||||
expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
|
||||
} else {
|
||||
expectedParams.put("ignore_unavailable", Boolean.FALSE.toString());
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
boolean verbose = randomBoolean();
|
||||
getSnapshotsRequest.verbose(verbose);
|
||||
expectedParams.put("verbose", Boolean.toString(verbose));
|
||||
} else {
|
||||
expectedParams.put("verbose", Boolean.TRUE.toString());
|
||||
}
|
||||
|
||||
Request request = RequestConverters.getSnapshots(getSnapshotsRequest);
|
||||
assertThat(endpoint, equalTo(request.getEndpoint()));
|
||||
assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(expectedParams, equalTo(request.getParameters()));
|
||||
assertNull(request.getEntity());
|
||||
}
|
||||
|
||||
public void testGetAllSnapshots() {
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
String repository = randomIndicesNames(1, 1)[0];
|
||||
|
||||
String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/_all", repository);
|
||||
|
||||
GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(repository);
|
||||
setRandomMasterTimeout(getSnapshotsRequest, expectedParams);
|
||||
|
||||
boolean ignoreUnavailable = randomBoolean();
|
||||
getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
|
||||
expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
|
||||
|
||||
boolean verbose = randomBoolean();
|
||||
getSnapshotsRequest.verbose(verbose);
|
||||
expectedParams.put("verbose", Boolean.toString(verbose));
|
||||
|
||||
Request request = RequestConverters.getSnapshots(getSnapshotsRequest);
|
||||
assertThat(endpoint, equalTo(request.getEndpoint()));
|
||||
assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(expectedParams, equalTo(request.getParameters()));
|
||||
assertNull(request.getEntity());
|
||||
}
|
||||
|
||||
public void testDeleteSnapshot() {
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
String repository = randomIndicesNames(1, 1)[0];
|
||||
|
@ -2055,6 +2283,22 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
assertThat(request.getEntity(), nullValue());
|
||||
}
|
||||
|
||||
public void testAnalyzeRequest() throws Exception {
|
||||
AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
|
||||
.text("Here is some text")
|
||||
.index("test_index")
|
||||
.analyzer("test_analyzer");
|
||||
|
||||
Request request = RequestConverters.analyze(indexAnalyzeRequest);
|
||||
assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
|
||||
assertToXContentBody(indexAnalyzeRequest, request.getEntity());
|
||||
|
||||
AnalyzeRequest analyzeRequest = new AnalyzeRequest()
|
||||
.text("more text")
|
||||
.analyzer("test_analyzer");
|
||||
assertThat(RequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
|
||||
}
|
||||
|
||||
public void testGetScriptRequest() {
|
||||
GetStoredScriptRequest getStoredScriptRequest = new GetStoredScriptRequest("x-script");
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
|
@ -2223,6 +2467,37 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
+ "previous requests have content-type [" + xContentType + "]", exception.getMessage());
|
||||
}
|
||||
|
||||
public void testXPackInfo() {
|
||||
XPackInfoRequest infoRequest = new XPackInfoRequest();
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
infoRequest.setVerbose(randomBoolean());
|
||||
if (false == infoRequest.isVerbose()) {
|
||||
expectedParams.put("human", "false");
|
||||
}
|
||||
int option = between(0, 2);
|
||||
switch (option) {
|
||||
case 0:
|
||||
infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class));
|
||||
break;
|
||||
case 1:
|
||||
infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES));
|
||||
expectedParams.put("categories", "features");
|
||||
break;
|
||||
case 2:
|
||||
infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES, XPackInfoRequest.Category.BUILD));
|
||||
expectedParams.put("categories", "build,features");
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid option [" + option + "]");
|
||||
}
|
||||
|
||||
Request request = RequestConverters.xPackInfo(infoRequest);
|
||||
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
||||
assertEquals("/_xpack", request.getEndpoint());
|
||||
assertNull(request.getEntity());
|
||||
assertEquals(expectedParams, request.getParameters());
|
||||
}
|
||||
|
||||
/**
|
||||
* Randomize the {@link FetchSourceContext} request parameters.
|
||||
*/
|
||||
|
@ -2264,7 +2539,7 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
expectedParams.put("preference", searchRequest.preference());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
searchRequest.searchType(randomFrom(SearchType.values()));
|
||||
searchRequest.searchType(randomFrom(SearchType.CURRENTLY_SUPPORTED));
|
||||
}
|
||||
expectedParams.put("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT));
|
||||
if (randomBoolean()) {
|
||||
|
|
|
@ -27,6 +27,8 @@ import org.apache.http.entity.StringEntity;
|
|||
import org.apache.http.nio.entity.NStringEntity;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchStatusException;
|
||||
import org.elasticsearch.action.explain.ExplainRequest;
|
||||
import org.elasticsearch.action.explain.ExplainResponse;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
|
||||
|
@ -44,6 +46,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.query.MatchQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.ScriptQueryBuilder;
|
||||
import org.elasticsearch.index.query.TermsQueryBuilder;
|
||||
import org.elasticsearch.join.aggregations.Children;
|
||||
|
@ -51,6 +54,9 @@ import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder;
|
|||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptType;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse.Item;
|
||||
import org.elasticsearch.script.mustache.SearchTemplateRequest;
|
||||
import org.elasticsearch.script.mustache.SearchTemplateResponse;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
|
@ -63,6 +69,7 @@ import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats;
|
|||
import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.elasticsearch.search.suggest.Suggest;
|
||||
|
@ -135,7 +142,44 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
|
|||
client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/5", Collections.emptyMap(), doc);
|
||||
doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
|
||||
client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/6", Collections.emptyMap(), doc);
|
||||
client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3/_refresh");
|
||||
|
||||
mappings = new StringEntity(
|
||||
"{" +
|
||||
" \"mappings\": {" +
|
||||
" \"doc\": {" +
|
||||
" \"properties\": {" +
|
||||
" \"field1\": {" +
|
||||
" \"type\": \"keyword\"," +
|
||||
" \"store\": true" +
|
||||
" }," +
|
||||
" \"field2\": {" +
|
||||
" \"type\": \"keyword\"," +
|
||||
" \"store\": true" +
|
||||
" }" +
|
||||
" }" +
|
||||
" }" +
|
||||
" }" +
|
||||
"}}",
|
||||
ContentType.APPLICATION_JSON);
|
||||
client().performRequest(HttpPut.METHOD_NAME, "/index4", Collections.emptyMap(), mappings);
|
||||
doc = new StringEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}", ContentType.APPLICATION_JSON);
|
||||
client().performRequest(HttpPut.METHOD_NAME, "/index4/doc/1", Collections.emptyMap(), doc);
|
||||
StringEntity aliasFilter = new StringEntity(
|
||||
"{" +
|
||||
" \"actions\" : [" +
|
||||
" {" +
|
||||
" \"add\" : {" +
|
||||
" \"index\" : \"index4\"," +
|
||||
" \"alias\" : \"alias4\"," +
|
||||
" \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" +
|
||||
" }" +
|
||||
" }" +
|
||||
" ]" +
|
||||
"}",
|
||||
ContentType.APPLICATION_JSON);
|
||||
client().performRequest(HttpPost.METHOD_NAME, "/_aliases", Collections.emptyMap(), aliasFilter);
|
||||
|
||||
client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3,index4/_refresh");
|
||||
}
|
||||
|
||||
public void testSearchNoQuery() throws IOException {
|
||||
|
@ -834,6 +878,273 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON);
|
||||
}
|
||||
|
||||
|
||||
public void testMultiSearchTemplate() throws Exception {
|
||||
MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
|
||||
|
||||
SearchTemplateRequest goodRequest = new SearchTemplateRequest();
|
||||
goodRequest.setRequest(new SearchRequest("index"));
|
||||
goodRequest.setScriptType(ScriptType.INLINE);
|
||||
goodRequest.setScript(
|
||||
"{" +
|
||||
" \"query\": {" +
|
||||
" \"match\": {" +
|
||||
" \"num\": {{number}}" +
|
||||
" }" +
|
||||
" }" +
|
||||
"}");
|
||||
Map<String, Object> scriptParams = new HashMap<>();
|
||||
scriptParams.put("number", 10);
|
||||
goodRequest.setScriptParams(scriptParams);
|
||||
goodRequest.setExplain(true);
|
||||
goodRequest.setProfile(true);
|
||||
multiSearchTemplateRequest.add(goodRequest);
|
||||
|
||||
|
||||
SearchTemplateRequest badRequest = new SearchTemplateRequest();
|
||||
badRequest.setRequest(new SearchRequest("index"));
|
||||
badRequest.setScriptType(ScriptType.INLINE);
|
||||
badRequest.setScript("{ NOT VALID JSON {{number}} }");
|
||||
scriptParams = new HashMap<>();
|
||||
scriptParams.put("number", 10);
|
||||
badRequest.setScriptParams(scriptParams);
|
||||
|
||||
multiSearchTemplateRequest.add(badRequest);
|
||||
|
||||
MultiSearchTemplateResponse multiSearchTemplateResponse =
|
||||
execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
|
||||
highLevelClient()::multiSearchTemplateAsync);
|
||||
|
||||
Item[] responses = multiSearchTemplateResponse.getResponses();
|
||||
|
||||
assertEquals(2, responses.length);
|
||||
|
||||
|
||||
assertNull(responses[0].getResponse().getSource());
|
||||
SearchResponse goodResponse =responses[0].getResponse().getResponse();
|
||||
assertNotNull(goodResponse);
|
||||
assertThat(responses[0].isFailure(), Matchers.is(false));
|
||||
assertEquals(1, goodResponse.getHits().totalHits);
|
||||
assertEquals(1, goodResponse.getHits().getHits().length);
|
||||
assertThat(goodResponse.getHits().getMaxScore(), greaterThan(0f));
|
||||
SearchHit hit = goodResponse.getHits().getHits()[0];
|
||||
assertNotNull(hit.getExplanation());
|
||||
assertFalse(goodResponse.getProfileResults().isEmpty());
|
||||
|
||||
|
||||
assertNull(responses[0].getResponse().getSource());
|
||||
assertThat(responses[1].isFailure(), Matchers.is(true));
|
||||
assertNotNull(responses[1].getFailureMessage());
|
||||
assertThat(responses[1].getFailureMessage(), containsString("json_parse_exception"));
|
||||
}
|
||||
|
||||
public void testMultiSearchTemplateAllBad() throws Exception {
|
||||
MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
|
||||
|
||||
SearchTemplateRequest badRequest1 = new SearchTemplateRequest();
|
||||
badRequest1.setRequest(new SearchRequest("index"));
|
||||
badRequest1.setScriptType(ScriptType.INLINE);
|
||||
badRequest1.setScript(
|
||||
"{" +
|
||||
" \"query\": {" +
|
||||
" \"match\": {" +
|
||||
" \"num\": {{number}}" +
|
||||
" }" +
|
||||
" }" +
|
||||
"}");
|
||||
Map<String, Object> scriptParams = new HashMap<>();
|
||||
scriptParams.put("number", "BAD NUMBER");
|
||||
badRequest1.setScriptParams(scriptParams);
|
||||
multiSearchTemplateRequest.add(badRequest1);
|
||||
|
||||
|
||||
SearchTemplateRequest badRequest2 = new SearchTemplateRequest();
|
||||
badRequest2.setRequest(new SearchRequest("index"));
|
||||
badRequest2.setScriptType(ScriptType.INLINE);
|
||||
badRequest2.setScript("BAD QUERY TEMPLATE");
|
||||
scriptParams = new HashMap<>();
|
||||
scriptParams.put("number", "BAD NUMBER");
|
||||
badRequest2.setScriptParams(scriptParams);
|
||||
|
||||
multiSearchTemplateRequest.add(badRequest2);
|
||||
|
||||
// The whole HTTP request should fail if no nested search requests are valid
|
||||
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
|
||||
() -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
|
||||
highLevelClient()::multiSearchTemplateAsync));
|
||||
|
||||
assertEquals(RestStatus.BAD_REQUEST, exception.status());
|
||||
assertThat(exception.getMessage(), containsString("no requests added"));
|
||||
}
|
||||
|
||||
public void testExplain() throws IOException {
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.matchAllQuery());
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertThat(explainResponse.getIndex(), equalTo("index1"));
|
||||
assertThat(explainResponse.getType(), equalTo("doc"));
|
||||
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
|
||||
assertTrue(explainResponse.isExists());
|
||||
assertTrue(explainResponse.isMatch());
|
||||
assertTrue(explainResponse.hasExplanation());
|
||||
assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
|
||||
assertNull(explainResponse.getGetResult());
|
||||
}
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.termQuery("field", "value1"));
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertThat(explainResponse.getIndex(), equalTo("index1"));
|
||||
assertThat(explainResponse.getType(), equalTo("doc"));
|
||||
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
|
||||
assertTrue(explainResponse.isExists());
|
||||
assertTrue(explainResponse.isMatch());
|
||||
assertTrue(explainResponse.hasExplanation());
|
||||
assertThat(explainResponse.getExplanation().getValue(), greaterThan(0.0f));
|
||||
assertNull(explainResponse.getGetResult());
|
||||
}
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.termQuery("field", "value2"));
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertThat(explainResponse.getIndex(), equalTo("index1"));
|
||||
assertThat(explainResponse.getType(), equalTo("doc"));
|
||||
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
|
||||
assertTrue(explainResponse.isExists());
|
||||
assertFalse(explainResponse.isMatch());
|
||||
assertTrue(explainResponse.hasExplanation());
|
||||
assertNull(explainResponse.getGetResult());
|
||||
}
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.boolQuery()
|
||||
.must(QueryBuilders.termQuery("field", "value1"))
|
||||
.must(QueryBuilders.termQuery("field", "value2")));
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertThat(explainResponse.getIndex(), equalTo("index1"));
|
||||
assertThat(explainResponse.getType(), equalTo("doc"));
|
||||
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
|
||||
assertTrue(explainResponse.isExists());
|
||||
assertFalse(explainResponse.isMatch());
|
||||
assertTrue(explainResponse.hasExplanation());
|
||||
assertThat(explainResponse.getExplanation().getDetails().length, equalTo(2));
|
||||
assertNull(explainResponse.getGetResult());
|
||||
}
|
||||
}
|
||||
|
||||
public void testExplainNonExistent() throws IOException {
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("non_existent_index", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.matchQuery("field", "value"));
|
||||
ElasticsearchException exception = expectThrows(ElasticsearchException.class,
|
||||
() -> execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync));
|
||||
assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND));
|
||||
assertThat(exception.getIndex().getName(), equalTo("non_existent_index"));
|
||||
assertThat(exception.getDetailedMessage(),
|
||||
containsString("Elasticsearch exception [type=index_not_found_exception, reason=no such index]"));
|
||||
}
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "999");
|
||||
explainRequest.query(QueryBuilders.matchQuery("field", "value1"));
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertThat(explainResponse.getIndex(), equalTo("index1"));
|
||||
assertThat(explainResponse.getType(), equalTo("doc"));
|
||||
assertThat(explainResponse.getId(), equalTo("999"));
|
||||
assertFalse(explainResponse.isExists());
|
||||
assertFalse(explainResponse.isMatch());
|
||||
assertFalse(explainResponse.hasExplanation());
|
||||
assertNull(explainResponse.getGetResult());
|
||||
}
|
||||
}
|
||||
|
||||
public void testExplainWithStoredFields() throws IOException {
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.matchAllQuery());
|
||||
explainRequest.storedFields(new String[]{"field1"});
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertTrue(explainResponse.isExists());
|
||||
assertTrue(explainResponse.isMatch());
|
||||
assertTrue(explainResponse.hasExplanation());
|
||||
assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
|
||||
assertTrue(explainResponse.getGetResult().isExists());
|
||||
assertThat(explainResponse.getGetResult().getFields().keySet(), equalTo(Collections.singleton("field1")));
|
||||
assertThat(explainResponse.getGetResult().getFields().get("field1").getValue().toString(), equalTo("value1"));
|
||||
assertTrue(explainResponse.getGetResult().isSourceEmpty());
|
||||
}
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.matchAllQuery());
|
||||
explainRequest.storedFields(new String[]{"field1", "field2"});
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertTrue(explainResponse.isExists());
|
||||
assertTrue(explainResponse.isMatch());
|
||||
assertTrue(explainResponse.hasExplanation());
|
||||
assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
|
||||
assertTrue(explainResponse.getGetResult().isExists());
|
||||
assertThat(explainResponse.getGetResult().getFields().keySet().size(), equalTo(2));
|
||||
assertThat(explainResponse.getGetResult().getFields().get("field1").getValue().toString(), equalTo("value1"));
|
||||
assertThat(explainResponse.getGetResult().getFields().get("field2").getValue().toString(), equalTo("value2"));
|
||||
assertTrue(explainResponse.getGetResult().isSourceEmpty());
|
||||
}
|
||||
}
|
||||
|
||||
public void testExplainWithFetchSource() throws IOException {
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.matchAllQuery());
|
||||
explainRequest.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1"}, null));
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertTrue(explainResponse.isExists());
|
||||
assertTrue(explainResponse.isMatch());
|
||||
assertTrue(explainResponse.hasExplanation());
|
||||
assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
|
||||
assertTrue(explainResponse.getGetResult().isExists());
|
||||
assertThat(explainResponse.getGetResult().getSource(), equalTo(Collections.singletonMap("field1", "value1")));
|
||||
}
|
||||
{
|
||||
ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.matchAllQuery());
|
||||
explainRequest.fetchSourceContext(new FetchSourceContext(true, null, new String[] {"field2"}));
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertTrue(explainResponse.isExists());
|
||||
assertTrue(explainResponse.isMatch());
|
||||
assertTrue(explainResponse.hasExplanation());
|
||||
assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
|
||||
assertTrue(explainResponse.getGetResult().isExists());
|
||||
assertThat(explainResponse.getGetResult().getSource(), equalTo(Collections.singletonMap("field1", "value1")));
|
||||
}
|
||||
}
|
||||
|
||||
public void testExplainWithAliasFilter() throws IOException {
|
||||
ExplainRequest explainRequest = new ExplainRequest("alias4", "doc", "1");
|
||||
explainRequest.query(QueryBuilders.matchAllQuery());
|
||||
|
||||
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
|
||||
|
||||
assertTrue(explainResponse.isExists());
|
||||
assertFalse(explainResponse.isMatch());
|
||||
}
|
||||
|
||||
public void testFieldCaps() throws IOException {
|
||||
FieldCapabilitiesRequest request = new FieldCapabilitiesRequest()
|
||||
|
|
|
@ -28,15 +28,20 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ
|
|||
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.repositories.fs.FsRepository;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class SnapshotIT extends ESRestHighLevelClientTestCase {
|
||||
|
@ -49,12 +54,12 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
|
|||
highLevelClient().snapshot()::createRepositoryAsync);
|
||||
}
|
||||
|
||||
private Response createTestSnapshot(String repository, String snapshot) throws IOException {
|
||||
Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repository, snapshot));
|
||||
createSnapshot.addParameter("wait_for_completion", "true");
|
||||
return highLevelClient().getLowLevelClient().performRequest(createSnapshot);
|
||||
}
|
||||
private CreateSnapshotResponse createTestSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
|
||||
// assumes the repository already exists
|
||||
|
||||
return execute(createSnapshotRequest, highLevelClient().snapshot()::createSnapshot,
|
||||
highLevelClient().snapshot()::createSnapshotAsync);
|
||||
}
|
||||
|
||||
public void testCreateRepository() throws IOException {
|
||||
PutRepositoryResponse response = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}");
|
||||
|
@ -119,6 +124,55 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
|
|||
assertThat(response.getNodes().size(), equalTo(1));
|
||||
}
|
||||
|
||||
public void testCreateSnapshot() throws IOException {
|
||||
String repository = "test_repository";
|
||||
assertTrue(createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged());
|
||||
|
||||
String snapshot = "test_snapshot";
|
||||
CreateSnapshotRequest request = new CreateSnapshotRequest(repository, snapshot);
|
||||
boolean waitForCompletion = randomBoolean();
|
||||
request.waitForCompletion(waitForCompletion);
|
||||
request.partial(randomBoolean());
|
||||
request.includeGlobalState(randomBoolean());
|
||||
|
||||
CreateSnapshotResponse response = createTestSnapshot(request);
|
||||
assertEquals(waitForCompletion ? RestStatus.OK : RestStatus.ACCEPTED, response.status());
|
||||
}
|
||||
|
||||
public void testGetSnapshots() throws IOException {
|
||||
String repository = "test_repository";
|
||||
String snapshot1 = "test_snapshot1";
|
||||
String snapshot2 = "test_snapshot2";
|
||||
|
||||
PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}");
|
||||
assertTrue(putRepositoryResponse.isAcknowledged());
|
||||
|
||||
CreateSnapshotRequest createSnapshotRequest1 = new CreateSnapshotRequest(repository, snapshot1);
|
||||
createSnapshotRequest1.waitForCompletion(true);
|
||||
CreateSnapshotResponse putSnapshotResponse1 = createTestSnapshot(createSnapshotRequest1);
|
||||
CreateSnapshotRequest createSnapshotRequest2 = new CreateSnapshotRequest(repository, snapshot2);
|
||||
createSnapshotRequest2.waitForCompletion(true);
|
||||
CreateSnapshotResponse putSnapshotResponse2 = createTestSnapshot(createSnapshotRequest2);
|
||||
// check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
|
||||
assertEquals(RestStatus.OK, putSnapshotResponse1.status());
|
||||
assertEquals(RestStatus.OK, putSnapshotResponse2.status());
|
||||
|
||||
GetSnapshotsRequest request;
|
||||
if (randomBoolean()) {
|
||||
request = new GetSnapshotsRequest(repository);
|
||||
} else if (randomBoolean()) {
|
||||
request = new GetSnapshotsRequest(repository, new String[] {"_all"});
|
||||
|
||||
} else {
|
||||
request = new GetSnapshotsRequest(repository, new String[] {snapshot1, snapshot2});
|
||||
}
|
||||
GetSnapshotsResponse response = execute(request, highLevelClient().snapshot()::get, highLevelClient().snapshot()::getAsync);
|
||||
|
||||
assertEquals(2, response.getSnapshots().size());
|
||||
assertThat(response.getSnapshots().stream().map((s) -> s.snapshotId().getName()).collect(Collectors.toList()),
|
||||
contains("test_snapshot1", "test_snapshot2"));
|
||||
}
|
||||
|
||||
public void testDeleteSnapshot() throws IOException {
|
||||
String repository = "test_repository";
|
||||
String snapshot = "test_snapshot";
|
||||
|
@ -126,9 +180,11 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
|
|||
PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}");
|
||||
assertTrue(putRepositoryResponse.isAcknowledged());
|
||||
|
||||
Response putSnapshotResponse = createTestSnapshot(repository, snapshot);
|
||||
CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot);
|
||||
createSnapshotRequest.waitForCompletion(true);
|
||||
CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest);
|
||||
// check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
|
||||
assertEquals(200, putSnapshotResponse.getStatusLine().getStatusCode());
|
||||
assertEquals(RestStatus.OK, createSnapshotResponse.status());
|
||||
|
||||
DeleteSnapshotRequest request = new DeleteSnapshotRequest(repository, snapshot);
|
||||
DeleteSnapshotResponse response = execute(request, highLevelClient().snapshot()::delete, highLevelClient().snapshot()::deleteAsync);
|
||||
|
|
|
@ -23,6 +23,8 @@ import org.elasticsearch.action.ActionListener;
|
|||
import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
|
@ -49,6 +51,7 @@ import java.util.concurrent.CountDownLatch;
|
|||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
/**
|
||||
|
@ -189,6 +192,71 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
|
|||
}
|
||||
}
|
||||
|
||||
public void testClusterGetSettings() throws IOException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
// tag::get-settings-request
|
||||
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
|
||||
// end::get-settings-request
|
||||
|
||||
// tag::get-settings-request-includeDefaults
|
||||
request.includeDefaults(true); // <1>
|
||||
// end::get-settings-request-includeDefaults
|
||||
|
||||
// tag::get-settings-request-local
|
||||
request.local(true); // <1>
|
||||
// end::get-settings-request-local
|
||||
|
||||
// tag::get-settings-request-masterTimeout
|
||||
request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
|
||||
request.masterNodeTimeout("1m"); // <2>
|
||||
// end::get-settings-request-masterTimeout
|
||||
|
||||
// tag::get-settings-execute
|
||||
ClusterGetSettingsResponse response = client.cluster().getSettings(request, RequestOptions.DEFAULT); // <1>
|
||||
// end::get-settings-execute
|
||||
|
||||
// tag::get-settings-response
|
||||
Settings persistentSettings = response.getPersistentSettings(); // <1>
|
||||
Settings transientSettings = response.getTransientSettings(); // <2>
|
||||
Settings defaultSettings = response.getDefaultSettings(); // <3>
|
||||
String settingValue = response.getSetting("cluster.routing.allocation.enable"); // <4>
|
||||
// end::get-settings-response
|
||||
|
||||
assertThat(defaultSettings.size(), greaterThan(0));
|
||||
}
|
||||
|
||||
public void testClusterGetSettingsAsync() throws InterruptedException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
|
||||
|
||||
// tag::get-settings-execute-listener
|
||||
ActionListener<ClusterGetSettingsResponse> listener =
|
||||
new ActionListener<ClusterGetSettingsResponse>() {
|
||||
@Override
|
||||
public void onResponse(ClusterGetSettingsResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::get-settings-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::get-settings-execute-async
|
||||
client.cluster().getSettingsAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::get-settings-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
public void testClusterHealth() throws IOException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
client.indices().create(new CreateIndexRequest("index"), RequestOptions.DEFAULT);
|
||||
|
|
|
@ -27,6 +27,9 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
|||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
|
||||
import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
|
||||
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
|
||||
|
@ -41,6 +44,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
|
|||
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
|
||||
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
|
||||
|
@ -86,12 +90,14 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -1232,6 +1238,81 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
|
|||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
public void testGetIndex() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
{
|
||||
Settings settings = Settings.builder().put("number_of_shards", 3).build();
|
||||
String mappings = "{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}";
|
||||
CreateIndexResponse createIndexResponse = client.indices().create(
|
||||
new CreateIndexRequest("index", settings).mapping("doc", mappings, XContentType.JSON),
|
||||
RequestOptions.DEFAULT);
|
||||
assertTrue(createIndexResponse.isAcknowledged());
|
||||
}
|
||||
|
||||
// tag::get-index-request
|
||||
GetIndexRequest request = new GetIndexRequest().indices("index"); // <1>
|
||||
// end::get-index-request
|
||||
|
||||
// tag::get-index-request-indicesOptions
|
||||
request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1>
|
||||
// end::get-index-request-indicesOptions
|
||||
|
||||
// tag::get-index-request-includeDefaults
|
||||
request.includeDefaults(true); // <1>
|
||||
// end::get-index-request-includeDefaults
|
||||
|
||||
// tag::get-index-execute
|
||||
GetIndexResponse getIndexResponse = client.indices().get(request, RequestOptions.DEFAULT);
|
||||
// end::get-index-execute
|
||||
|
||||
// tag::get-index-response
|
||||
ImmutableOpenMap<String, MappingMetaData> indexMappings = getIndexResponse.getMappings().get("index"); // <1>
|
||||
Map<String, Object> indexTypeMappings = indexMappings.get("doc").getSourceAsMap(); // <2>
|
||||
List<AliasMetaData> indexAliases = getIndexResponse.getAliases().get("index"); // <3>
|
||||
String numberOfShardsString = getIndexResponse.getSetting("index", "index.number_of_shards"); // <4>
|
||||
Settings indexSettings = getIndexResponse.getSettings().get("index"); // <5>
|
||||
Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null); // <6>
|
||||
TimeValue time = getIndexResponse.defaultSettings().get("index")
|
||||
.getAsTime("index.refresh_interval", null); // <7>
|
||||
// end::get-index-response
|
||||
|
||||
assertEquals(
|
||||
Collections.singletonMap("properties",
|
||||
Collections.singletonMap("field-1", Collections.singletonMap("type", "integer"))),
|
||||
indexTypeMappings
|
||||
);
|
||||
assertTrue(indexAliases.isEmpty());
|
||||
assertEquals(IndexSettings.DEFAULT_REFRESH_INTERVAL, time);
|
||||
assertEquals("3", numberOfShardsString);
|
||||
assertEquals(Integer.valueOf(3), numberOfShards);
|
||||
|
||||
// tag::get-index-execute-listener
|
||||
ActionListener<GetIndexResponse> listener =
|
||||
new ActionListener<GetIndexResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetIndexResponse getIndexResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::get-index-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::get-index-execute-async
|
||||
client.indices().getAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::get-index-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
public void testForceMergeIndex() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
|
@ -2317,4 +2398,127 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
|
|||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
public void testAnalyze() throws IOException, InterruptedException {
|
||||
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
{
|
||||
// tag::analyze-builtin-request
|
||||
AnalyzeRequest request = new AnalyzeRequest();
|
||||
request.text("Some text to analyze", "Some more text to analyze"); // <1>
|
||||
request.analyzer("english"); // <2>
|
||||
// end::analyze-builtin-request
|
||||
}
|
||||
|
||||
{
|
||||
// tag::analyze-custom-request
|
||||
AnalyzeRequest request = new AnalyzeRequest();
|
||||
request.text("<b>Some text to analyze</b>");
|
||||
request.addCharFilter("html_strip"); // <1>
|
||||
request.tokenizer("standard"); // <2>
|
||||
request.addTokenFilter("lowercase"); // <3>
|
||||
|
||||
Map<String, Object> stopFilter = new HashMap<>();
|
||||
stopFilter.put("type", "stop");
|
||||
stopFilter.put("stopwords", new String[]{ "to" }); // <4>
|
||||
request.addTokenFilter(stopFilter); // <5>
|
||||
// end::analyze-custom-request
|
||||
}
|
||||
|
||||
{
|
||||
// tag::analyze-custom-normalizer-request
|
||||
AnalyzeRequest request = new AnalyzeRequest();
|
||||
request.text("<b>BaR</b>");
|
||||
request.addTokenFilter("lowercase");
|
||||
// end::analyze-custom-normalizer-request
|
||||
|
||||
// tag::analyze-request-explain
|
||||
request.explain(true); // <1>
|
||||
request.attributes("keyword", "type"); // <2>
|
||||
// end::analyze-request-explain
|
||||
|
||||
// tag::analyze-request-sync
|
||||
AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
|
||||
// end::analyze-request-sync
|
||||
|
||||
// tag::analyze-response-tokens
|
||||
List<AnalyzeResponse.AnalyzeToken> tokens = response.getTokens(); // <1>
|
||||
// end::analyze-response-tokens
|
||||
// tag::analyze-response-detail
|
||||
DetailAnalyzeResponse detail = response.detail(); // <1>
|
||||
// end::analyze-response-detail
|
||||
|
||||
assertNull(tokens);
|
||||
assertNotNull(detail.tokenizer());
|
||||
}
|
||||
|
||||
CreateIndexRequest req = new CreateIndexRequest("my_index");
|
||||
CreateIndexResponse resp = client.indices().create(req, RequestOptions.DEFAULT);
|
||||
assertTrue(resp.isAcknowledged());
|
||||
|
||||
PutMappingRequest pmReq = new PutMappingRequest()
|
||||
.indices("my_index")
|
||||
.type("_doc")
|
||||
.source("my_field", "type=text,analyzer=english");
|
||||
PutMappingResponse pmResp = client.indices().putMapping(pmReq, RequestOptions.DEFAULT);
|
||||
assertTrue(pmResp.isAcknowledged());
|
||||
|
||||
{
|
||||
// tag::analyze-index-request
|
||||
AnalyzeRequest request = new AnalyzeRequest();
|
||||
request.index("my_index"); // <1>
|
||||
request.analyzer("my_analyzer"); // <2>
|
||||
request.text("some text to analyze");
|
||||
// end::analyze-index-request
|
||||
|
||||
// tag::analyze-execute-listener
|
||||
ActionListener<AnalyzeResponse> listener = new ActionListener<AnalyzeResponse>() {
|
||||
@Override
|
||||
public void onResponse(AnalyzeResponse analyzeTokens) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
|
||||
}
|
||||
};
|
||||
// end::analyze-execute-listener
|
||||
|
||||
// use a built-in analyzer in the test
|
||||
request = new AnalyzeRequest();
|
||||
request.index("my_index");
|
||||
request.field("my_field");
|
||||
request.text("some text to analyze");
|
||||
// Use a blocking listener in the test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::analyze-request-async
|
||||
client.indices().analyzeAsync(request, RequestOptions.DEFAULT, listener);
|
||||
// end::analyze-request-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
{
|
||||
// tag::analyze-index-normalizer-request
|
||||
AnalyzeRequest request = new AnalyzeRequest();
|
||||
request.index("my_index"); // <1>
|
||||
request.normalizer("my_normalizer"); // <2>
|
||||
request.text("some text to analyze");
|
||||
// end::analyze-index-normalizer-request
|
||||
}
|
||||
|
||||
{
|
||||
// tag::analyze-field-request
|
||||
AnalyzeRequest request = new AnalyzeRequest();
|
||||
request.index("my_index");
|
||||
request.field("my_field");
|
||||
request.text("some text to analyze");
|
||||
// end::analyze-field-request
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,14 +22,24 @@ package org.elasticsearch.client.documentation;
|
|||
import org.apache.http.HttpHost;
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.action.main.MainResponse;
|
||||
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.RestClient;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoResponse;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.EnumSet;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Documentation for miscellaneous APIs in the high level java client.
|
||||
|
@ -66,6 +76,59 @@ public class MiscellaneousDocumentationIT extends ESRestHighLevelClientTestCase
|
|||
assertTrue(response);
|
||||
}
|
||||
|
||||
public void testXPackInfo() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
//tag::x-pack-info-execute
|
||||
XPackInfoRequest request = new XPackInfoRequest();
|
||||
request.setVerbose(true); // <1>
|
||||
request.setCategories(EnumSet.of( // <2>
|
||||
XPackInfoRequest.Category.BUILD,
|
||||
XPackInfoRequest.Category.LICENSE,
|
||||
XPackInfoRequest.Category.FEATURES));
|
||||
XPackInfoResponse response = client.xPackInfo(request, RequestOptions.DEFAULT);
|
||||
//end::x-pack-info-execute
|
||||
|
||||
//tag::x-pack-info-response
|
||||
BuildInfo build = response.getBuildInfo(); // <1>
|
||||
LicenseInfo license = response.getLicenseInfo(); // <2>
|
||||
assertEquals(XPackInfoResponse.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS,
|
||||
license.getExpiryDate()); // <3>
|
||||
FeatureSetsInfo features = response.getFeatureSetsInfo(); // <4>
|
||||
//end::x-pack-info-response
|
||||
|
||||
assertNotNull(response.getBuildInfo());
|
||||
assertNotNull(response.getLicenseInfo());
|
||||
assertNotNull(response.getFeatureSetsInfo());
|
||||
}
|
||||
{
|
||||
XPackInfoRequest request = new XPackInfoRequest();
|
||||
// tag::x-pack-info-execute-listener
|
||||
ActionListener<XPackInfoResponse> listener = new ActionListener<XPackInfoResponse>() {
|
||||
@Override
|
||||
public void onResponse(XPackInfoResponse indexResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::x-pack-info-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::x-pack-info-execute-async
|
||||
client.xPackInfoAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::x-pack-info-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testInitializationFromClientBuilder() throws IOException {
|
||||
//tag::rest-high-level-client-init
|
||||
RestHighLevelClient client = new RestHighLevelClient(
|
||||
|
|
|
@ -19,12 +19,15 @@
|
|||
|
||||
package org.elasticsearch.client.documentation;
|
||||
|
||||
import org.apache.lucene.search.Explanation;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.explain.ExplainRequest;
|
||||
import org.elasticsearch.action.explain.ExplainResponse;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
|
||||
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
|
||||
|
@ -47,10 +50,12 @@ import org.elasticsearch.client.Response;
|
|||
import org.elasticsearch.client.RestClient;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.get.GetResult;
|
||||
import org.elasticsearch.index.query.MatchQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
|
@ -66,6 +71,9 @@ import org.elasticsearch.index.rankeval.RatedRequest;
|
|||
import org.elasticsearch.index.rankeval.RatedSearchHit;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.script.ScriptType;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse.Item;
|
||||
import org.elasticsearch.script.mustache.SearchTemplateRequest;
|
||||
import org.elasticsearch.script.mustache.SearchTemplateResponse;
|
||||
import org.elasticsearch.search.Scroll;
|
||||
|
@ -80,6 +88,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
|
|||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
|
||||
import org.elasticsearch.search.profile.ProfileResult;
|
||||
|
@ -767,21 +776,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
RestHighLevelClient client = highLevelClient();
|
||||
RestClient restClient = client();
|
||||
|
||||
// tag::register-script
|
||||
Request scriptRequest = new Request("POST", "_scripts/title_search");
|
||||
scriptRequest.setJsonEntity(
|
||||
"{" +
|
||||
" \"script\": {" +
|
||||
" \"lang\": \"mustache\"," +
|
||||
" \"source\": {" +
|
||||
" \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
|
||||
" \"size\" : \"{{size}}\"" +
|
||||
" }" +
|
||||
" }" +
|
||||
"}");
|
||||
Response scriptResponse = restClient.performRequest(scriptRequest);
|
||||
// end::register-script
|
||||
assertEquals(RestStatus.OK.getStatus(), scriptResponse.getStatusLine().getStatusCode());
|
||||
registerQueryScript(restClient);
|
||||
|
||||
// tag::search-template-request-stored
|
||||
SearchTemplateRequest request = new SearchTemplateRequest();
|
||||
|
@ -834,6 +829,223 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
public void testMultiSearchTemplateWithInlineScript() throws Exception {
|
||||
indexSearchTestData();
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
// tag::multi-search-template-request-inline
|
||||
String [] searchTerms = {"elasticsearch", "logstash", "kibana"};
|
||||
|
||||
MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); // <1>
|
||||
for (String searchTerm : searchTerms) {
|
||||
SearchTemplateRequest request = new SearchTemplateRequest(); // <2>
|
||||
request.setRequest(new SearchRequest("posts"));
|
||||
|
||||
request.setScriptType(ScriptType.INLINE);
|
||||
request.setScript(
|
||||
"{" +
|
||||
" \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
|
||||
" \"size\" : \"{{size}}\"" +
|
||||
"}");
|
||||
|
||||
Map<String, Object> scriptParams = new HashMap<>();
|
||||
scriptParams.put("field", "title");
|
||||
scriptParams.put("value", searchTerm);
|
||||
scriptParams.put("size", 5);
|
||||
request.setScriptParams(scriptParams);
|
||||
|
||||
multiRequest.add(request); // <3>
|
||||
}
|
||||
// end::multi-search-template-request-inline
|
||||
|
||||
// tag::multi-search-template-request-sync
|
||||
MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT);
|
||||
// end::multi-search-template-request-sync
|
||||
|
||||
// tag::multi-search-template-response
|
||||
for (Item item : multiResponse.getResponses()) { // <1>
|
||||
if (item.isFailure()) {
|
||||
String error = item.getFailureMessage(); // <2>
|
||||
} else {
|
||||
SearchTemplateResponse searchTemplateResponse = item.getResponse(); // <3>
|
||||
SearchResponse searchResponse = searchTemplateResponse.getResponse();
|
||||
searchResponse.getHits();
|
||||
}
|
||||
}
|
||||
// end::multi-search-template-response
|
||||
|
||||
assertNotNull(multiResponse);
|
||||
assertEquals(searchTerms.length, multiResponse.getResponses().length);
|
||||
assertNotNull(multiResponse.getResponses()[0]);
|
||||
SearchResponse searchResponse = multiResponse.getResponses()[0].getResponse().getResponse();
|
||||
assertTrue(searchResponse.getHits().totalHits > 0);
|
||||
|
||||
}
|
||||
|
||||
public void testMultiSearchTemplateWithStoredScript() throws Exception {
|
||||
indexSearchTestData();
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
RestClient restClient = client();
|
||||
|
||||
registerQueryScript(restClient);
|
||||
|
||||
// tag::multi-search-template-request-stored
|
||||
MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();
|
||||
|
||||
String [] searchTerms = {"elasticsearch", "logstash", "kibana"};
|
||||
for (String searchTerm : searchTerms) {
|
||||
|
||||
SearchTemplateRequest request = new SearchTemplateRequest();
|
||||
request.setRequest(new SearchRequest("posts"));
|
||||
|
||||
request.setScriptType(ScriptType.STORED);
|
||||
request.setScript("title_search");
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("field", "title");
|
||||
params.put("value", searchTerm);
|
||||
params.put("size", 5);
|
||||
request.setScriptParams(params);
|
||||
multiRequest.add(request);
|
||||
}
|
||||
// end::multi-search-template-request-stored
|
||||
|
||||
|
||||
|
||||
|
||||
// tag::multi-search-template-execute
|
||||
MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT);
|
||||
// end::multi-search-template-execute
|
||||
|
||||
assertNotNull(multiResponse);
|
||||
assertEquals(searchTerms.length, multiResponse.getResponses().length);
|
||||
assertNotNull(multiResponse.getResponses()[0]);
|
||||
SearchResponse searchResponse = multiResponse.getResponses()[0].getResponse().getResponse();
|
||||
assertTrue(searchResponse.getHits().totalHits > 0);
|
||||
|
||||
// tag::multi-search-template-execute-listener
|
||||
ActionListener<MultiSearchTemplateResponse> listener = new ActionListener<MultiSearchTemplateResponse>() {
|
||||
@Override
|
||||
public void onResponse(MultiSearchTemplateResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::multi-search-template-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener for tests.
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::multi-search-template-execute-async
|
||||
client.multiSearchTemplateAsync(multiRequest, RequestOptions.DEFAULT, listener);
|
||||
// end::multi-search-template-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
protected void registerQueryScript(RestClient restClient) throws IOException {
|
||||
// tag::register-script
|
||||
Request scriptRequest = new Request("POST", "_scripts/title_search");
|
||||
scriptRequest.setJsonEntity(
|
||||
"{" +
|
||||
" \"script\": {" +
|
||||
" \"lang\": \"mustache\"," +
|
||||
" \"source\": {" +
|
||||
" \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
|
||||
" \"size\" : \"{{size}}\"" +
|
||||
" }" +
|
||||
" }" +
|
||||
"}");
|
||||
Response scriptResponse = restClient.performRequest(scriptRequest);
|
||||
// end::register-script
|
||||
assertEquals(RestStatus.OK.getStatus(), scriptResponse.getStatusLine().getStatusCode());
|
||||
}
|
||||
|
||||
|
||||
public void testExplain() throws Exception {
|
||||
indexSearchTestData();
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
// tag::explain-request
|
||||
ExplainRequest request = new ExplainRequest("contributors", "doc", "1");
|
||||
request.query(QueryBuilders.termQuery("user", "tanguy"));
|
||||
// end::explain-request
|
||||
|
||||
// tag::explain-request-routing
|
||||
request.routing("routing"); // <1>
|
||||
// end::explain-request-routing
|
||||
|
||||
// tag::explain-request-preference
|
||||
request.preference("_local"); // <1>
|
||||
// end::explain-request-preference
|
||||
|
||||
// tag::explain-request-source
|
||||
request.fetchSourceContext(new FetchSourceContext(true, new String[]{"user"}, null)); // <1>
|
||||
// end::explain-request-source
|
||||
|
||||
// tag::explain-request-stored-field
|
||||
request.storedFields(new String[]{"user"}); // <1>
|
||||
// end::explain-request-stored-field
|
||||
|
||||
// tag::explain-execute
|
||||
ExplainResponse response = client.explain(request, RequestOptions.DEFAULT);
|
||||
// end::explain-execute
|
||||
|
||||
// tag::explain-response
|
||||
String index = response.getIndex(); // <1>
|
||||
String type = response.getType(); // <2>
|
||||
String id = response.getId(); // <3>
|
||||
boolean exists = response.isExists(); // <4>
|
||||
boolean match = response.isMatch(); // <5>
|
||||
boolean hasExplanation = response.hasExplanation(); // <6>
|
||||
Explanation explanation = response.getExplanation(); // <7>
|
||||
GetResult getResult = response.getGetResult(); // <8>
|
||||
// end::explain-response
|
||||
assertThat(index, equalTo("contributors"));
|
||||
assertThat(type, equalTo("doc"));
|
||||
assertThat(id, equalTo("1"));
|
||||
assertTrue(exists);
|
||||
assertTrue(match);
|
||||
assertTrue(hasExplanation);
|
||||
assertNotNull(explanation);
|
||||
assertNotNull(getResult);
|
||||
|
||||
// tag::get-result
|
||||
Map<String, Object> source = getResult.getSource(); // <1>
|
||||
Map<String, DocumentField> fields = getResult.getFields(); // <2>
|
||||
// end::get-result
|
||||
assertThat(source, equalTo(Collections.singletonMap("user", "tanguy")));
|
||||
assertThat(fields.get("user").getValue(), equalTo("tanguy"));
|
||||
|
||||
// tag::explain-execute-listener
|
||||
ActionListener<ExplainResponse> listener = new ActionListener<ExplainResponse>() {
|
||||
@Override
|
||||
public void onResponse(ExplainResponse explainResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::explain-execute-listener
|
||||
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::explain-execute-async
|
||||
client.explainAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::explain-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
public void testFieldCaps() throws Exception {
|
||||
indexSearchTestData();
|
||||
|
@ -1046,7 +1258,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
assertTrue(authorsResponse.isAcknowledged());
|
||||
|
||||
CreateIndexRequest reviewersRequest = new CreateIndexRequest("contributors")
|
||||
.mapping("doc", "user", "type=keyword");
|
||||
.mapping("doc", "user", "type=keyword,store=true");
|
||||
CreateIndexResponse reviewersResponse = highLevelClient().indices().create(reviewersRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(reviewersResponse.isAcknowledged());
|
||||
|
||||
|
|
|
@ -29,6 +29,12 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ
|
|||
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
|
||||
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
||||
|
@ -41,6 +47,11 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.repositories.fs.FsRepository;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.snapshots.SnapshotId;
|
||||
import org.elasticsearch.snapshots.SnapshotInfo;
|
||||
import org.elasticsearch.snapshots.SnapshotShardFailure;
|
||||
import org.elasticsearch.snapshots.SnapshotState;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
|
@ -367,6 +378,171 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
|
|||
}
|
||||
}
|
||||
|
||||
public void testSnapshotCreate() throws IOException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
CreateIndexRequest createIndexRequest = new CreateIndexRequest("test-index0");
|
||||
client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
|
||||
createIndexRequest = new CreateIndexRequest("test-index1");
|
||||
client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
|
||||
|
||||
createTestRepositories();
|
||||
|
||||
// tag::create-snapshot-request
|
||||
CreateSnapshotRequest request = new CreateSnapshotRequest();
|
||||
// end::create-snapshot-request
|
||||
|
||||
// tag::create-snapshot-request-repositoryName
|
||||
request.repository(repositoryName); // <1>
|
||||
// end::create-snapshot-request-repositoryName
|
||||
// tag::create-snapshot-request-snapshotName
|
||||
request.snapshot(snapshotName); // <1>
|
||||
// end::create-snapshot-request-snapshotName
|
||||
// tag::create-snapshot-request-indices
|
||||
request.indices("test-index0", "test-index1"); // <1>
|
||||
// end::create-snapshot-request-indices
|
||||
// tag::create-snapshot-request-indicesOptions
|
||||
request.indicesOptions(IndicesOptions.fromOptions(false, false, true, true)); // <1>
|
||||
// end::create-snapshot-request-indicesOptions
|
||||
// tag::create-snapshot-request-partial
|
||||
request.partial(false); // <1>
|
||||
// end::create-snapshot-request-partial
|
||||
// tag::create-snapshot-request-includeGlobalState
|
||||
request.includeGlobalState(true); // <1>
|
||||
// end::create-snapshot-request-includeGlobalState
|
||||
|
||||
// tag::create-snapshot-request-masterTimeout
|
||||
request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
|
||||
request.masterNodeTimeout("1m"); // <2>
|
||||
// end::create-snapshot-request-masterTimeout
|
||||
// tag::create-snapshot-request-waitForCompletion
|
||||
request.waitForCompletion(true); // <1>
|
||||
// end::create-snapshot-request-waitForCompletion
|
||||
|
||||
// tag::create-snapshot-execute
|
||||
CreateSnapshotResponse response = client.snapshot().createSnapshot(request, RequestOptions.DEFAULT);
|
||||
// end::create-snapshot-execute
|
||||
|
||||
// tag::create-snapshot-response
|
||||
RestStatus status = response.status(); // <1>
|
||||
// end::create-snapshot-response
|
||||
|
||||
assertEquals(RestStatus.OK, status);
|
||||
}
|
||||
|
||||
public void testSnapshotCreateAsync() throws InterruptedException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
CreateSnapshotRequest request = new CreateSnapshotRequest(repositoryName, snapshotName);
|
||||
|
||||
// tag::create-snapshot-execute-listener
|
||||
ActionListener<CreateSnapshotResponse> listener =
|
||||
new ActionListener<CreateSnapshotResponse>() {
|
||||
@Override
|
||||
public void onResponse(CreateSnapshotResponse createSnapshotResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception exception) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::create-snapshot-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::create-snapshot-execute-async
|
||||
client.snapshot().createSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::create-snapshot-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testSnapshotGetSnapshots() throws IOException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
createTestRepositories();
|
||||
createTestSnapshots();
|
||||
|
||||
// tag::get-snapshots-request
|
||||
GetSnapshotsRequest request = new GetSnapshotsRequest();
|
||||
// end::get-snapshots-request
|
||||
|
||||
// tag::get-snapshots-request-repositoryName
|
||||
request.repository(repositoryName); // <1>
|
||||
// end::get-snapshots-request-repositoryName
|
||||
|
||||
// tag::get-snapshots-request-snapshots
|
||||
String[] snapshots = { snapshotName };
|
||||
request.snapshots(snapshots); // <1>
|
||||
// end::get-snapshots-request-snapshots
|
||||
|
||||
// tag::get-snapshots-request-masterTimeout
|
||||
request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
|
||||
request.masterNodeTimeout("1m"); // <2>
|
||||
// end::get-snapshots-request-masterTimeout
|
||||
|
||||
// tag::get-snapshots-request-verbose
|
||||
request.verbose(true); // <1>
|
||||
// end::get-snapshots-request-verbose
|
||||
|
||||
// tag::get-snapshots-request-ignore-unavailable
|
||||
request.ignoreUnavailable(false); // <1>
|
||||
// end::get-snapshots-request-ignore-unavailable
|
||||
|
||||
// tag::get-snapshots-execute
|
||||
GetSnapshotsResponse response = client.snapshot().get(request, RequestOptions.DEFAULT);
|
||||
// end::get-snapshots-execute
|
||||
|
||||
// tag::get-snapshots-response
|
||||
List<SnapshotInfo> snapshotsInfos = response.getSnapshots();
|
||||
SnapshotInfo snapshotInfo = snapshotsInfos.get(0);
|
||||
RestStatus restStatus = snapshotInfo.status(); // <1>
|
||||
SnapshotId snapshotId = snapshotInfo.snapshotId(); // <2>
|
||||
SnapshotState snapshotState = snapshotInfo.state(); // <3>
|
||||
List<SnapshotShardFailure> snapshotShardFailures = snapshotInfo.shardFailures(); // <4>
|
||||
long startTime = snapshotInfo.startTime(); // <5>
|
||||
long endTime = snapshotInfo.endTime(); // <6>
|
||||
// end::get-snapshots-response
|
||||
assertEquals(1, snapshotsInfos.size());
|
||||
}
|
||||
|
||||
public void testSnapshotGetSnapshotsAsync() throws InterruptedException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
GetSnapshotsRequest request = new GetSnapshotsRequest(repositoryName);
|
||||
|
||||
// tag::get-snapshots-execute-listener
|
||||
ActionListener<GetSnapshotsResponse> listener =
|
||||
new ActionListener<GetSnapshotsResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetSnapshotsResponse getSnapshotsResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::get-snapshots-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::get-snapshots-execute-async
|
||||
client.snapshot().getAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::get-snapshots-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testSnapshotDeleteSnapshot() throws IOException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.client;
|
|||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* A {@link NodeSelector} that selects nodes that have a particular value
|
||||
|
@ -49,6 +50,24 @@ public final class HasAttributeNodeSelector implements NodeSelector {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
HasAttributeNodeSelector that = (HasAttributeNodeSelector) o;
|
||||
return Objects.equals(key, that.key) &&
|
||||
Objects.equals(value, that.value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(key, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return key + "=" + value;
|
||||
|
|
|
@ -615,16 +615,16 @@ public class RestClient implements Closeable {
|
|||
*/
|
||||
private NodeTuple<Iterator<Node>> nextNode() throws IOException {
|
||||
NodeTuple<List<Node>> nodeTuple = this.nodeTuple;
|
||||
List<Node> hosts = selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector);
|
||||
Iterable<Node> hosts = selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector);
|
||||
return new NodeTuple<>(hosts.iterator(), nodeTuple.authCache);
|
||||
}
|
||||
|
||||
/**
|
||||
* Select hosts to try. Package private for testing.
|
||||
* Select nodes to try and sorts them so that the first one will be tried initially, then the following ones
|
||||
* if the previous attempt failed and so on. Package private for testing.
|
||||
*/
|
||||
static List<Node> selectHosts(NodeTuple<List<Node>> nodeTuple,
|
||||
Map<HttpHost, DeadHostState> blacklist, AtomicInteger lastNodeIndex,
|
||||
NodeSelector nodeSelector) throws IOException {
|
||||
static Iterable<Node> selectNodes(NodeTuple<List<Node>> nodeTuple, Map<HttpHost, DeadHostState> blacklist,
|
||||
AtomicInteger lastNodeIndex, NodeSelector nodeSelector) throws IOException {
|
||||
/*
|
||||
* Sort the nodes into living and dead lists.
|
||||
*/
|
||||
|
@ -653,8 +653,8 @@ public class RestClient implements Closeable {
|
|||
nodeSelector.select(selectedLivingNodes);
|
||||
if (false == selectedLivingNodes.isEmpty()) {
|
||||
/*
|
||||
* Rotate the list so subsequent requests will prefer the
|
||||
* nodes in a different order.
|
||||
* Rotate the list using a global counter as the distance so subsequent
|
||||
* requests will try the nodes in a different order.
|
||||
*/
|
||||
Collections.rotate(selectedLivingNodes, lastNodeIndex.getAndIncrement());
|
||||
return selectedLivingNodes;
|
||||
|
@ -662,15 +662,13 @@ public class RestClient implements Closeable {
|
|||
}
|
||||
|
||||
/*
|
||||
* Last resort: If there are no good nodes to use, either because
|
||||
* Last resort: there are no good nodes to use, either because
|
||||
* the selector rejected all the living nodes or because there aren't
|
||||
* any living ones. Either way, we want to revive a single dead node
|
||||
* that the NodeSelectors are OK with. We do this by sorting the dead
|
||||
* nodes by their revival time and passing them through the
|
||||
* NodeSelector so it can have its say in which nodes are ok and their
|
||||
* ordering. If the selector is ok with any of the nodes then use just
|
||||
* the first one in the list because we only want to revive a single
|
||||
* node.
|
||||
* that the NodeSelectors are OK with. We do this by passing the dead
|
||||
* nodes through the NodeSelector so it can have its say in which nodes
|
||||
* are ok. If the selector is ok with any of the nodes then we will take
|
||||
* the one in the list that has the lowest revival time and try it.
|
||||
*/
|
||||
if (false == deadNodes.isEmpty()) {
|
||||
final List<DeadNode> selectedDeadNodes = new ArrayList<>(deadNodes);
|
||||
|
@ -796,8 +794,10 @@ public class RestClient implements Closeable {
|
|||
Objects.requireNonNull(path, "path must not be null");
|
||||
try {
|
||||
String fullPath;
|
||||
if (pathPrefix != null) {
|
||||
if (path.startsWith("/")) {
|
||||
if (pathPrefix != null && pathPrefix.isEmpty() == false) {
|
||||
if (pathPrefix.endsWith("/") && path.startsWith("/")) {
|
||||
fullPath = pathPrefix.substring(0, pathPrefix.length() - 1) + path;
|
||||
} else if (pathPrefix.endsWith("/") || path.startsWith("/")) {
|
||||
fullPath = pathPrefix + path;
|
||||
} else {
|
||||
fullPath = pathPrefix + "/" + path;
|
||||
|
@ -1010,8 +1010,8 @@ public class RestClient implements Closeable {
|
|||
}
|
||||
|
||||
/**
|
||||
* Adapts an <code>Iterator<DeadNodeAndRevival></code> into an
|
||||
* <code>Iterator<Node></code>.
|
||||
* Adapts an <code>Iterator<DeadNodeAndRevival></code> into an
|
||||
* <code>Iterator<Node></code>.
|
||||
*/
|
||||
private static class DeadNodeIteratorAdapter implements Iterator<Node> {
|
||||
private final Iterator<DeadNode> itr;
|
||||
|
|
|
@ -143,22 +143,26 @@ public final class RestClientBuilder {
|
|||
* For example, if this is set to "/my/path", then any client request will become <code>"/my/path/" + endpoint</code>.
|
||||
* <p>
|
||||
* In essence, every request's {@code endpoint} is prefixed by this {@code pathPrefix}. The path prefix is useful for when
|
||||
* Elasticsearch is behind a proxy that provides a base path; it is not intended for other purposes and it should not be supplied in
|
||||
* other scenarios.
|
||||
* Elasticsearch is behind a proxy that provides a base path or a proxy that requires all paths to start with '/';
|
||||
* it is not intended for other purposes and it should not be supplied in other scenarios.
|
||||
*
|
||||
* @throws NullPointerException if {@code pathPrefix} is {@code null}.
|
||||
* @throws IllegalArgumentException if {@code pathPrefix} is empty, only '/', or ends with more than one '/'.
|
||||
* @throws IllegalArgumentException if {@code pathPrefix} is empty, or ends with more than one '/'.
|
||||
*/
|
||||
public RestClientBuilder setPathPrefix(String pathPrefix) {
|
||||
Objects.requireNonNull(pathPrefix, "pathPrefix must not be null");
|
||||
String cleanPathPrefix = pathPrefix;
|
||||
|
||||
if (pathPrefix.isEmpty()) {
|
||||
throw new IllegalArgumentException("pathPrefix must not be empty");
|
||||
}
|
||||
|
||||
String cleanPathPrefix = pathPrefix;
|
||||
if (cleanPathPrefix.startsWith("/") == false) {
|
||||
cleanPathPrefix = "/" + cleanPathPrefix;
|
||||
}
|
||||
|
||||
// best effort to ensure that it looks like "/base/path" rather than "/base/path/"
|
||||
if (cleanPathPrefix.endsWith("/")) {
|
||||
if (cleanPathPrefix.endsWith("/") && cleanPathPrefix.length() > 1) {
|
||||
cleanPathPrefix = cleanPathPrefix.substring(0, cleanPathPrefix.length() - 1);
|
||||
|
||||
if (cleanPathPrefix.endsWith("/")) {
|
||||
|
@ -166,9 +170,6 @@ public final class RestClientBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
if (cleanPathPrefix.isEmpty() || "/".equals(cleanPathPrefix)) {
|
||||
throw new IllegalArgumentException("pathPrefix must not be empty or '/': [" + pathPrefix + "]");
|
||||
}
|
||||
|
||||
this.pathPrefix = cleanPathPrefix;
|
||||
return this;
|
||||
|
|
|
@ -180,7 +180,6 @@ public class RestClientBuilderTests extends RestClientTestCase {
|
|||
}
|
||||
|
||||
public void testSetPathPrefixEmpty() {
|
||||
assertSetPathPrefixThrows("/");
|
||||
assertSetPathPrefixThrows("");
|
||||
}
|
||||
|
||||
|
|
|
@ -314,7 +314,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
|
|||
}
|
||||
|
||||
/**
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddHeaders()}.
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void tesPerformRequestOldStyleNullHeaders() throws IOException {
|
||||
|
|
|
@ -21,6 +21,9 @@ package org.elasticsearch.client;
|
|||
|
||||
import org.apache.http.Header;
|
||||
import org.apache.http.HttpHost;
|
||||
import org.apache.http.client.AuthCache;
|
||||
import org.apache.http.impl.auth.BasicScheme;
|
||||
import org.apache.http.impl.client.BasicAuthCache;
|
||||
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
|
||||
import org.elasticsearch.client.DeadHostStateTests.ConfigurableTimeSupplier;
|
||||
import org.elasticsearch.client.RestClient.NodeTuple;
|
||||
|
@ -35,13 +38,14 @@ import java.util.Iterator;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertSame;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
@ -141,7 +145,7 @@ public class RestClientTests extends RestClientTestCase {
|
|||
}
|
||||
|
||||
/**
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddHeader()}.
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void testPerformOldStyleAsyncWithNullHeaders() throws Exception {
|
||||
|
@ -219,12 +223,33 @@ public class RestClientTests extends RestClientTestCase {
|
|||
}
|
||||
|
||||
public void testBuildUriLeavesPathUntouched() {
|
||||
final Map<String, String> emptyMap = Collections.emptyMap();
|
||||
{
|
||||
URI uri = RestClient.buildUri("/foo$bar", "/index/type/id", Collections.<String, String>emptyMap());
|
||||
URI uri = RestClient.buildUri("/foo$bar", "/index/type/id", emptyMap);
|
||||
assertEquals("/foo$bar/index/type/id", uri.getPath());
|
||||
}
|
||||
{
|
||||
URI uri = RestClient.buildUri(null, "/foo$bar/ty/pe/i/d", Collections.<String, String>emptyMap());
|
||||
URI uri = RestClient.buildUri("/", "/*", emptyMap);
|
||||
assertEquals("/*", uri.getPath());
|
||||
}
|
||||
{
|
||||
URI uri = RestClient.buildUri("/", "*", emptyMap);
|
||||
assertEquals("/*", uri.getPath());
|
||||
}
|
||||
{
|
||||
URI uri = RestClient.buildUri(null, "*", emptyMap);
|
||||
assertEquals("*", uri.getPath());
|
||||
}
|
||||
{
|
||||
URI uri = RestClient.buildUri("", "*", emptyMap);
|
||||
assertEquals("*", uri.getPath());
|
||||
}
|
||||
{
|
||||
URI uri = RestClient.buildUri(null, "/*", emptyMap);
|
||||
assertEquals("/*", uri.getPath());
|
||||
}
|
||||
{
|
||||
URI uri = RestClient.buildUri(null, "/foo$bar/ty/pe/i/d", emptyMap);
|
||||
assertEquals("/foo$bar/ty/pe/i/d", uri.getPath());
|
||||
}
|
||||
{
|
||||
|
@ -407,8 +432,8 @@ public class RestClientTests extends RestClientTestCase {
|
|||
* blacklist time. It'll revive the node that is closest
|
||||
* to being revived that the NodeSelector is ok with.
|
||||
*/
|
||||
assertEquals(singletonList(n1), RestClient.selectHosts(nodeTuple, blacklist, new AtomicInteger(), NodeSelector.ANY));
|
||||
assertEquals(singletonList(n2), RestClient.selectHosts(nodeTuple, blacklist, new AtomicInteger(), not1));
|
||||
assertEquals(singletonList(n1), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), NodeSelector.ANY));
|
||||
assertEquals(singletonList(n2), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), not1));
|
||||
|
||||
/*
|
||||
* Try a NodeSelector that excludes all nodes. This should
|
||||
|
@ -449,23 +474,23 @@ public class RestClientTests extends RestClientTestCase {
|
|||
Map<HttpHost, DeadHostState> blacklist, NodeSelector nodeSelector) throws IOException {
|
||||
int iterations = 1000;
|
||||
AtomicInteger lastNodeIndex = new AtomicInteger(0);
|
||||
assertEquals(expectedNodes, RestClient.selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector));
|
||||
assertEquals(expectedNodes, RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector));
|
||||
// Calling it again rotates the set of results
|
||||
for (int i = 1; i < iterations; i++) {
|
||||
Collections.rotate(expectedNodes, 1);
|
||||
assertEquals("iteration " + i, expectedNodes,
|
||||
RestClient.selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector));
|
||||
RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that {@link RestClient#selectHosts} fails on the provided arguments.
|
||||
* Assert that {@link RestClient#selectNodes} fails on the provided arguments.
|
||||
* @return the message in the exception thrown by the failure
|
||||
*/
|
||||
private String assertSelectAllRejected( NodeTuple<List<Node>> nodeTuple,
|
||||
private static String assertSelectAllRejected( NodeTuple<List<Node>> nodeTuple,
|
||||
Map<HttpHost, DeadHostState> blacklist, NodeSelector nodeSelector) {
|
||||
try {
|
||||
RestClient.selectHosts(nodeTuple, blacklist, new AtomicInteger(0), nodeSelector);
|
||||
RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(0), nodeSelector);
|
||||
throw new AssertionError("expected selectHosts to fail");
|
||||
} catch (IOException e) {
|
||||
return e.getMessage();
|
||||
|
@ -478,5 +503,56 @@ public class RestClientTests extends RestClientTestCase {
|
|||
new Header[] {}, nodes, null, null, null);
|
||||
}
|
||||
|
||||
public void testRoundRobin() throws IOException {
|
||||
int numNodes = randomIntBetween(2, 10);
|
||||
AuthCache authCache = new BasicAuthCache();
|
||||
List<Node> nodes = new ArrayList<>(numNodes);
|
||||
for (int i = 0; i < numNodes; i++) {
|
||||
Node node = new Node(new HttpHost("localhost", 9200 + i));
|
||||
nodes.add(node);
|
||||
authCache.put(node.getHost(), new BasicScheme());
|
||||
}
|
||||
NodeTuple<List<Node>> nodeTuple = new NodeTuple<>(nodes, authCache);
|
||||
|
||||
//test the transition from negative to positive values
|
||||
AtomicInteger lastNodeIndex = new AtomicInteger(-numNodes);
|
||||
assertNodes(nodeTuple, lastNodeIndex, 50);
|
||||
assertEquals(-numNodes + 50, lastNodeIndex.get());
|
||||
|
||||
//test the highest positive values up to MAX_VALUE
|
||||
lastNodeIndex.set(Integer.MAX_VALUE - numNodes * 10);
|
||||
assertNodes(nodeTuple, lastNodeIndex, numNodes * 10);
|
||||
assertEquals(Integer.MAX_VALUE, lastNodeIndex.get());
|
||||
|
||||
//test the transition from MAX_VALUE to MIN_VALUE
|
||||
//this is the only time where there is most likely going to be a jump from a node
|
||||
//to another one that's not necessarily the next one.
|
||||
assertEquals(Integer.MIN_VALUE, lastNodeIndex.incrementAndGet());
|
||||
assertNodes(nodeTuple, lastNodeIndex, 50);
|
||||
assertEquals(Integer.MIN_VALUE + 50, lastNodeIndex.get());
|
||||
}
|
||||
|
||||
private static void assertNodes(NodeTuple<List<Node>> nodeTuple, AtomicInteger lastNodeIndex, int runs) throws IOException {
|
||||
int distance = lastNodeIndex.get() % nodeTuple.nodes.size();
|
||||
/*
|
||||
* Collections.rotate is not super intuitive: distance 1 means that the last element will become the first and so on,
|
||||
* while distance -1 means that the second element will become the first and so on.
|
||||
*/
|
||||
int expectedOffset = distance > 0 ? nodeTuple.nodes.size() - distance : Math.abs(distance);
|
||||
for (int i = 0; i < runs; i++) {
|
||||
Iterable<Node> selectedNodes = RestClient.selectNodes(nodeTuple, Collections.<HttpHost, DeadHostState>emptyMap(),
|
||||
lastNodeIndex, NodeSelector.ANY);
|
||||
List<Node> expectedNodes = nodeTuple.nodes;
|
||||
int index = 0;
|
||||
for (Node actualNode : selectedNodes) {
|
||||
Node expectedNode = expectedNodes.get((index + expectedOffset) % expectedNodes.size());
|
||||
assertSame(expectedNode, actualNode);
|
||||
index++;
|
||||
}
|
||||
expectedOffset--;
|
||||
if (expectedOffset < 0) {
|
||||
expectedOffset += nodeTuple.nodes.size();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -46,6 +46,12 @@ my %Group_Labels = (
|
|||
'other' => 'NOT CLASSIFIED',
|
||||
);
|
||||
|
||||
my %Area_Overrides = (
|
||||
':ml' => 'Machine Learning',
|
||||
':beats' => 'Beats Plugin',
|
||||
':Docs' => 'Docs Infrastructure'
|
||||
);
|
||||
|
||||
use JSON();
|
||||
use Encode qw(encode_utf8);
|
||||
|
||||
|
@ -175,8 +181,14 @@ ISSUE:
|
|||
# uncomment for including/excluding PRs already issued in other versions
|
||||
# next if grep {$_->{name}=~/^v2/} @{$issue->{labels}};
|
||||
my %labels = map { $_->{name} => 1 } @{ $issue->{labels} };
|
||||
my ($header) = map { m{:[^/]+/(.+)} && $1 }
|
||||
grep {/^:/} sort keys %labels;
|
||||
my @area_labels = grep {/^:/} sort keys %labels;
|
||||
my ($header) = map { m{:[^/]+/(.+)} && $1 } @area_labels;
|
||||
if (scalar @area_labels > 1) {
|
||||
$header = "MULTIPLE AREA LABELS";
|
||||
}
|
||||
if (scalar @area_labels == 1 && exists $Area_Overrides{$area_labels[0]}) {
|
||||
$header = $Area_Overrides{$area_labels[0]};
|
||||
}
|
||||
$header ||= 'NOT CLASSIFIED';
|
||||
for (@Groups) {
|
||||
if ( $labels{$_} ) {
|
||||
|
|
|
@ -102,7 +102,7 @@ Closure commonZipConfig = {
|
|||
|
||||
task buildIntegTestZip(type: Zip) {
|
||||
configure(commonZipConfig)
|
||||
with archiveFiles(transportModulesFiles, 'zip', false)
|
||||
with archiveFiles(transportModulesFiles, 'zip', true)
|
||||
}
|
||||
|
||||
task buildZip(type: Zip) {
|
||||
|
@ -193,7 +193,7 @@ subprojects {
|
|||
onlyIf toolExists
|
||||
doLast {
|
||||
final String licenseFilename
|
||||
if (project.name.contains('oss-')) {
|
||||
if (project.name.contains('oss-') || project.name == 'integ-test-zip') {
|
||||
licenseFilename = "APACHE-LICENSE-2.0.txt"
|
||||
} else {
|
||||
licenseFilename = "ELASTIC-LICENSE.txt"
|
||||
|
|
|
@ -18,12 +18,14 @@
|
|||
*/
|
||||
|
||||
|
||||
|
||||
import org.apache.tools.ant.taskdefs.condition.Os
|
||||
import org.elasticsearch.gradle.LoggedExec
|
||||
import org.elasticsearch.gradle.Version
|
||||
|
||||
import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
|
||||
import java.nio.charset.StandardCharsets
|
||||
|
||||
import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
|
||||
/**
|
||||
* This is a dummy project which does a local checkout of the previous
|
||||
* wire compat version's branch, and builds a snapshot. This allows backcompat
|
||||
|
@ -147,12 +149,16 @@ subprojects {
|
|||
|
||||
task buildBwcVersion(type: Exec) {
|
||||
dependsOn checkoutBwcBranch, writeBuildMetadata
|
||||
// send RUNTIME_JAVA_HOME so the build doesn't fails on newer version the branch doesn't know about
|
||||
environment('RUNTIME_JAVA_HOME', getJavaHome(it, rootProject.ext.minimumRuntimeVersion.getMajorVersion() as int))
|
||||
workingDir = checkoutDir
|
||||
// we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds
|
||||
if (["5.6", "6.0", "6.1"].contains(bwcBranch)) {
|
||||
// we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds
|
||||
environment('JAVA_HOME', getJavaHome(it, 8))
|
||||
} else if ("6.2".equals(bwcBranch)) {
|
||||
environment('JAVA_HOME', getJavaHome(it, 9))
|
||||
} else if (["6.3", "6.x"].contains(bwcBranch)) {
|
||||
environment('JAVA_HOME', getJavaHome(it, 10))
|
||||
} else {
|
||||
environment('JAVA_HOME', project.compilerJavaHome)
|
||||
}
|
||||
|
@ -177,6 +183,8 @@ subprojects {
|
|||
} else if (showStacktraceName.equals("ALWAYS_FULL")) {
|
||||
args "--full-stacktrace"
|
||||
}
|
||||
standardOutput = new IndentingOutputStream(System.out)
|
||||
errorOutput = new IndentingOutputStream(System.err)
|
||||
doLast {
|
||||
List missing = artifactFiles.grep { file ->
|
||||
false == file.exists()
|
||||
|
@ -196,3 +204,27 @@ subprojects {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
class IndentingOutputStream extends OutputStream {
|
||||
|
||||
public static final byte[] INDENT = " [bwc] ".getBytes(StandardCharsets.UTF_8)
|
||||
private final OutputStream delegate
|
||||
|
||||
public IndentingOutputStream(OutputStream delegate) {
|
||||
this.delegate = delegate
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(int b) {
|
||||
write([b] as int[], 0, 1)
|
||||
}
|
||||
|
||||
public void write(int[] bytes, int offset, int length) {
|
||||
for (int i = 0; i < bytes.length; i++) {
|
||||
delegate.write(bytes[i])
|
||||
if (bytes[i] == '\n') {
|
||||
delegate.write(INDENT)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -9,6 +9,18 @@
|
|||
# $1=1 : indicates an new install
|
||||
# $1=2 : indicates an upgrade
|
||||
|
||||
# Check for these at preinst time due to failures in postinst if they do not exist
|
||||
if [ -x "$JAVA_HOME/bin/java" ]; then
|
||||
JAVA="$JAVA_HOME/bin/java"
|
||||
else
|
||||
JAVA=`which java`
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA" ]; then
|
||||
echo "could not find java; set JAVA_HOME or ensure java is in PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
case "$1" in
|
||||
|
||||
# Debian ####################################################
|
||||
|
|
|
@ -122,7 +122,7 @@ case "$1" in
|
|||
ulimit -l $MAX_LOCKED_MEMORY
|
||||
fi
|
||||
|
||||
if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -ge $(cat /proc/sys/vm/max_map_count) ]; then
|
||||
if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -gt $(cat /proc/sys/vm/max_map_count) ]; then
|
||||
sysctl -q -w vm.max_map_count=$MAX_MAP_COUNT
|
||||
fi
|
||||
|
||||
|
|
|
@ -90,7 +90,7 @@ start() {
|
|||
if [ -n "$MAX_LOCKED_MEMORY" ]; then
|
||||
ulimit -l $MAX_LOCKED_MEMORY
|
||||
fi
|
||||
if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -ge $(cat /proc/sys/vm/max_map_count) ]; then
|
||||
if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -gt $(cat /proc/sys/vm/max_map_count) ]; then
|
||||
sysctl -q -w vm.max_map_count=$MAX_MAP_COUNT
|
||||
fi
|
||||
|
||||
|
|
|
@ -8,7 +8,8 @@ CONSOLE" and "COPY AS CURL" in the documentation and are automatically tested
|
|||
by the command `gradle :docs:check`. To test just the docs from a single page,
|
||||
use e.g. `gradle :docs:check -Dtests.method="\*rollover*"`.
|
||||
|
||||
NOTE: If you have an elasticsearch-extra folder alongside your elasticsearch folder, you must temporarily rename it when you are testing 6.3 or later branches.
|
||||
NOTE: If you have an elasticsearch-extra folder alongside your elasticsearch
|
||||
folder, you must temporarily rename it when you are testing 6.3 or later branches.
|
||||
|
||||
By default each `// CONSOLE` snippet runs as its own isolated test. You can
|
||||
manipulate the test execution in the following ways:
|
||||
|
@ -36,7 +37,8 @@ for its modifiers:
|
|||
reason why the test shouldn't be run.
|
||||
* `// TEST[setup:name]`: Run some setup code before running the snippet. This
|
||||
is useful for creating and populating indexes used in the snippet. The setup
|
||||
code is defined in `docs/build.gradle`.
|
||||
code is defined in `docs/build.gradle`. See `// TESTSETUP` below for a
|
||||
similar feature.
|
||||
* `// TEST[warning:some warning]`: Expect the response to include a `Warning`
|
||||
header. If the response doesn't include a `Warning` header with the exact
|
||||
text then the test fails. If the response includes `Warning` headers that
|
||||
|
@ -68,7 +70,9 @@ for its modifiers:
|
|||
a test that runs the setup snippet first. See the "painless" docs for a file
|
||||
that puts this to good use. This is fairly similar to `// TEST[setup:name]`
|
||||
but rather than the setup defined in `docs/build.gradle` the setup is defined
|
||||
right in the documentation file.
|
||||
right in the documentation file. In general, we should prefer `// TESTSETUP`
|
||||
over `// TEST[setup:name]` because it makes it more clear what steps have to
|
||||
be taken before the examples will work.
|
||||
|
||||
In addition to the standard CONSOLE syntax these snippets can contain blocks
|
||||
of yaml surrounded by markers like this:
|
||||
|
|
|
@ -39,6 +39,15 @@ integTestCluster {
|
|||
setting 'reindex.remote.whitelist', '127.0.0.1:*'
|
||||
}
|
||||
|
||||
// remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed
|
||||
if (rootProject.ext.compilerJavaVersion.isJava11()) {
|
||||
integTestRunner {
|
||||
systemProperty 'tests.rest.blacklist', [
|
||||
'plugins/ingest-attachment/line_164',
|
||||
'plugins/ingest-attachment/line_117'
|
||||
].join(',')
|
||||
}
|
||||
}
|
||||
// Build the cluster with all plugins
|
||||
|
||||
project.rootProject.subprojects.findAll { it.parent.path == ':plugins' }.each { subproj ->
|
||||
|
@ -63,6 +72,8 @@ buildRestTests.docs = fileTree(projectDir) {
|
|||
exclude 'README.asciidoc'
|
||||
}
|
||||
|
||||
listSnippets.docs = buildRestTests.docs
|
||||
|
||||
Closure setupTwitter = { String name, int count ->
|
||||
buildRestTests.setups[name] = '''
|
||||
- do:
|
||||
|
@ -225,31 +236,6 @@ buildRestTests.doFirst {
|
|||
buildRestTests.setups['bank'].replace('#bank_data#', accounts)
|
||||
}
|
||||
|
||||
buildRestTests.setups['range_index'] = '''
|
||||
- do :
|
||||
indices.create:
|
||||
index: range_index
|
||||
body:
|
||||
settings:
|
||||
number_of_shards: 2
|
||||
number_of_replicas: 1
|
||||
mappings:
|
||||
_doc:
|
||||
properties:
|
||||
expected_attendees:
|
||||
type: integer_range
|
||||
time_frame:
|
||||
type: date_range
|
||||
format: yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis
|
||||
- do:
|
||||
bulk:
|
||||
index: range_index
|
||||
type: _doc
|
||||
refresh: true
|
||||
body: |
|
||||
{"index":{"_id": 1}}
|
||||
{"expected_attendees": {"gte": 10, "lte": 20}, "time_frame": {"gte": "2015-10-31 12:00:00", "lte": "2015-11-01"}}'''
|
||||
|
||||
// Used by index boost doc
|
||||
buildRestTests.setups['index_boost'] = '''
|
||||
- do:
|
||||
|
@ -603,4 +589,4 @@ buildRestTests.setups['library'] = '''
|
|||
{"index":{"_id": "The Moon is a Harsh Mistress"}}
|
||||
{"name": "The Moon is a Harsh Mistress", "author": "Robert A. Heinlein", "release_date": "1966-04-01", "page_count": 288}
|
||||
|
||||
'''
|
||||
'''
|
||||
|
|
|
@ -13,8 +13,8 @@ Here is an example on how to create the aggregation request:
|
|||
--------------------------------------------------
|
||||
ScriptedMetricAggregationBuilder aggregation = AggregationBuilders
|
||||
.scriptedMetric("agg")
|
||||
.initScript(new Script("params._agg.heights = []"))
|
||||
.mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"));
|
||||
.initScript(new Script("state.heights = []"))
|
||||
.mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"));
|
||||
--------------------------------------------------
|
||||
|
||||
You can also specify a `combine` script which will be executed on each shard:
|
||||
|
@ -23,9 +23,9 @@ You can also specify a `combine` script which will be executed on each shard:
|
|||
--------------------------------------------------
|
||||
ScriptedMetricAggregationBuilder aggregation = AggregationBuilders
|
||||
.scriptedMetric("agg")
|
||||
.initScript(new Script("params._agg.heights = []"))
|
||||
.mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"))
|
||||
.combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum"));
|
||||
.initScript(new Script("state.heights = []"))
|
||||
.mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"))
|
||||
.combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum"));
|
||||
--------------------------------------------------
|
||||
|
||||
You can also specify a `reduce` script which will be executed on the node which gets the request:
|
||||
|
@ -34,10 +34,10 @@ You can also specify a `reduce` script which will be executed on the node which
|
|||
--------------------------------------------------
|
||||
ScriptedMetricAggregationBuilder aggregation = AggregationBuilders
|
||||
.scriptedMetric("agg")
|
||||
.initScript(new Script("params._agg.heights = []"))
|
||||
.mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"))
|
||||
.combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum"))
|
||||
.reduceScript(new Script("double heights_sum = 0.0; for (a in params._aggs) { heights_sum += a } return heights_sum"));
|
||||
.initScript(new Script("state.heights = []"))
|
||||
.mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"))
|
||||
.combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum"))
|
||||
.reduceScript(new Script("double heights_sum = 0.0; for (a in states) { heights_sum += a } return heights_sum"));
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,92 @@
|
|||
[[java-rest-high-cluster-get-settings]]
|
||||
=== Cluster Get Settings API
|
||||
|
||||
The Cluster Get Settings API allows to get the cluster wide settings.
|
||||
|
||||
[[java-rest-high-cluster-get-settings-request]]
|
||||
==== Cluster Get Settings Request
|
||||
|
||||
A `ClusterGetSettingsRequest`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request]
|
||||
--------------------------------------------------
|
||||
|
||||
==== Optional arguments
|
||||
The following arguments can optionally be provided:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-includeDefaults]
|
||||
--------------------------------------------------
|
||||
<1> By default only those settings that were explicitly set are returned. Setting this to true also returns
|
||||
the default settings.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-local]
|
||||
--------------------------------------------------
|
||||
<1> By default the request goes to the master of the cluster to get the latest results. If local is specified it gets
|
||||
the results from whichever node the request goes to.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-masterTimeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to connect to the master node as a `TimeValue`
|
||||
<2> Timeout to connect to the master node as a `String`
|
||||
|
||||
[[java-rest-high-cluster-get-settings-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute]
|
||||
--------------------------------------------------
|
||||
<1> Execute the request and get back the response in a `ClusterGetSettingsResponse` object.
|
||||
|
||||
[[java-rest-high-cluster-get-settings-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a cluster get settings requires both the
|
||||
`ClusterGetSettingsRequest` instance and an `ActionListener` instance to be
|
||||
passed to the asynchronous method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `ClusterGetSettingsRequest` to execute and the `ActionListener`
|
||||
to use when the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `ClusterGetSettingsResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of a failure. The raised exception is provided as an argument
|
||||
|
||||
[[java-rest-high-cluster-get-settings-response]]
|
||||
==== Cluster Get Settings Response
|
||||
|
||||
The returned `ClusterGetSettingsResponse` allows to retrieve information about the
|
||||
executed operation as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-response]
|
||||
--------------------------------------------------
|
||||
<1> Get the persistent settings.
|
||||
<2> Get the transient settings.
|
||||
<3> Get the default settings (returns empty settings if `includeDefaults` was not set to `true`).
|
||||
<4> Get the value as a `String` for a particular setting. The order of searching is first in `persistentSettings` then in
|
||||
`transientSettings` and finally, if not found in either, in `defaultSettings`.
|
|
@ -0,0 +1,119 @@
|
|||
[[java-rest-high-analyze]]
|
||||
=== Analyze API
|
||||
|
||||
[[java-rest-high-analyze-request]]
|
||||
==== Analyze Request
|
||||
|
||||
An `AnalyzeRequest` contains the text to analyze, and one of several options to
|
||||
specify how the analysis should be performed.
|
||||
|
||||
The simplest version uses a built-in analyzer:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-builtin-request]
|
||||
---------------------------------------------------
|
||||
<1> The text to include. Multiple strings are treated as a multi-valued field
|
||||
<2> A built-in analyzer
|
||||
|
||||
You can configure a custom analyzer:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-custom-request]
|
||||
---------------------------------------------------
|
||||
<1> Configure char filters
|
||||
<2> Configure the tokenizer
|
||||
<3> Add a built-in tokenfilter
|
||||
<4> Configuration for a custom tokenfilter
|
||||
<5> Add the custom tokenfilter
|
||||
|
||||
You can also build a custom normalizer, by including only charfilters and
|
||||
tokenfilters:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-custom-normalizer-request]
|
||||
---------------------------------------------------
|
||||
|
||||
You can analyze text using an analyzer defined in an existing index:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-index-request]
|
||||
---------------------------------------------------
|
||||
<1> The index containing the mappings
|
||||
<2> The analyzer defined on this index to use
|
||||
|
||||
Or you can use a normalizer:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-index-normalizer-request]
|
||||
---------------------------------------------------
|
||||
<1> The index containing the mappings
|
||||
<2> The normalizer defined on this index to use
|
||||
|
||||
You can analyze text using the mappings for a particular field in an index:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-field-request]
|
||||
---------------------------------------------------
|
||||
|
||||
==== Optional arguments
|
||||
The following arguments can also optionally be provided:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-explain]
|
||||
---------------------------------------------------
|
||||
<1> Setting `explain` to true will add further details to the response
|
||||
<2> Setting `attributes` allows you to return only token attributes that you are
|
||||
interested in
|
||||
|
||||
[[java-rest-high-analyze-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-sync]
|
||||
---------------------------------------------------
|
||||
|
||||
[[java-rest-high-analyze-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of an analyze request requires both the `AnalyzeRequest`
|
||||
instance and an `ActionListener` instance to be passed to the asyncronous method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-async]
|
||||
---------------------------------------------------
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method if the
|
||||
execution successfully completed or using the `onFailure` method if it failed.
|
||||
|
||||
A typical listener for `AnalyzeResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-execute-listener]
|
||||
---------------------------------------------------
|
||||
|
||||
[[java-rest-high-analyze-response]]
|
||||
==== Analyze Response
|
||||
|
||||
The returned `AnalyzeResponse` allows you to retrieve details of the analysis as
|
||||
follows:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-response-tokens]
|
||||
---------------------------------------------------
|
||||
<1> `AnalyzeToken` holds information about the individual tokens produced by analysis
|
||||
|
||||
If `explain` was set to `true`, then information is instead returned from the `detail()`
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-response-detail]
|
||||
---------------------------------------------------
|
||||
<1> `DetailAnalyzeResponse` holds more detailed information about tokens produced by
|
||||
the various substeps in the analysis chain.
|
|
@ -0,0 +1,88 @@
|
|||
[[java-rest-high-get-index]]
|
||||
=== Get Index API
|
||||
|
||||
[[java-rest-high-get-index-request]]
|
||||
==== Get Index Request
|
||||
|
||||
A `GetIndexRequest` requires one or more `index` arguments:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-request]
|
||||
--------------------------------------------------
|
||||
<1> The index whose information we want to retrieve
|
||||
|
||||
==== Optional arguments
|
||||
The following arguments can optionally be provided:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-request-includeDefaults]
|
||||
--------------------------------------------------
|
||||
<1> If true, defaults will be returned for settings not explicitly set on the index
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-request-indicesOptions]
|
||||
--------------------------------------------------
|
||||
<1> Setting `IndicesOptions` controls how unavailable indices are resolved and
|
||||
how wildcard expressions are expanded
|
||||
|
||||
[[java-rest-high-get-index-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-get-index-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a Get Index request requires both the `GetIndexRequest`
|
||||
instance and an `ActionListener` instance to be passed to the asynchronous
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetIndexRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `GetIndexResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument.
|
||||
<2> Called in case of failure. The raised exception is provided as an argument.
|
||||
|
||||
[[java-rest-high-get-index-response]]
|
||||
==== Get Index Response
|
||||
|
||||
The returned `GetIndexResponse` allows to retrieve information about the
|
||||
executed operation as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-response]
|
||||
--------------------------------------------------
|
||||
<1> Retrieve a Map of different types to `MappingMetadata` for `index`.
|
||||
<2> Retrieve a Map for the properties for document type `doc`.
|
||||
<3> Get the list of aliases for `index`.
|
||||
<4> Get the value for the setting string `index.number_of_shards` for `index`. If the setting was not explicitly
|
||||
specified but was part of the default settings (and includeDefault was `true`) then the default setting would be
|
||||
retrieved.
|
||||
<5> Retrieve all settings for `index`.
|
||||
<6> The `Settings` objects gives more flexibility. Here it is used to extract the setting `index.number_of_shards` as an
|
||||
integer.
|
||||
<7> Get the default setting `index.refresh_interval` (if `includeDefault` was set to `true`). If `includeDefault` was set
|
||||
to `false`, `getIndexResponse.defaultSettings()` will return an empty map.
|
|
@ -0,0 +1,64 @@
|
|||
[[java-rest-high-x-pack-info]]
|
||||
=== X-Pack Info API
|
||||
|
||||
[[java-rest-high-x-pack-info-execution]]
|
||||
==== Execution
|
||||
|
||||
General information about the installed {xpack} features can be retrieved
|
||||
using the `xPackInfo()` method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-info-execute]
|
||||
--------------------------------------------------
|
||||
<1> Enable verbose mode. The default is `false` but `true` will return
|
||||
more information.
|
||||
<2> Set the categories of information to retrieve. The default is to
|
||||
return no information which is useful for checking if {xpack} is installed
|
||||
but not much else.
|
||||
|
||||
[[java-rest-high-x-pack-info-response]]
|
||||
==== Response
|
||||
|
||||
The returned `XPackInfoResponse` can contain `BuildInfo`, `LicenseInfo`,
|
||||
and `FeatureSetsInfo` depending on the categories requested.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-info-response]
|
||||
--------------------------------------------------
|
||||
<1> `BuildInfo` contains the commit hash from which Elasticsearch was
|
||||
built and the timestamp that the x-pack module was created.
|
||||
<2> `LicenseInfo` contains the type of license that the cluster is using
|
||||
and its expiration date.
|
||||
<3> Basic licenses do not expire and will return this constant.
|
||||
<4> `FeatureSetsInfo` contains a `Map` from the name of a feature to
|
||||
information about a feature like whether or not it is available under
|
||||
the current license.
|
||||
|
||||
[[java-rest-high-x-pack-info-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-info-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `XPackInfoRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `XPackInfoResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-info-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of failure. The raised exception is provided as an argument
|
|
@ -0,0 +1,113 @@
|
|||
[[java-rest-high-explain]]
|
||||
=== Explain API
|
||||
|
||||
The explain API computes a score explanation for a query and a specific document.
|
||||
This can give useful feedback on whether or not a document matches a specific query.
|
||||
|
||||
[[java-rest-high-explain-request]]
|
||||
==== Explain Request
|
||||
|
||||
An `ExplainRequest` expects an `index`, a `type` and an `id` to specify a certain document,
|
||||
and a query represented by `QueryBuilder` to run against it (the way of <<java-rest-high-query-builders, building queries>>).
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request]
|
||||
--------------------------------------------------
|
||||
|
||||
===== Optional arguments
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-routing]
|
||||
--------------------------------------------------
|
||||
<1> Set a routing parameter
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-preference]
|
||||
--------------------------------------------------
|
||||
<1> Use the preference parameter e.g. to execute the search to prefer local
|
||||
shards. The default is to randomize across shards.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-source]
|
||||
--------------------------------------------------
|
||||
<1> Set to true to retrieve the _source of the document explained. You can also
|
||||
retrieve part of the document by using _source_include & _source_exclude
|
||||
(see <<java-rest-high-document-get-request-optional-arguments, Get API>> for more details)
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-stored-field]
|
||||
--------------------------------------------------
|
||||
<1> Allows to control which stored fields to return as part of the document explained
|
||||
(requires the field to be stored separately in the mappings).
|
||||
|
||||
[[java-rest-high-explain-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
The `explain` method executes the request synchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-explain-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The `explainAsync` method executes the request asynchronously,
|
||||
calling the provided `ActionListener` when the response is ready:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `ExplainRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes.
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once the request
|
||||
completes, the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `ExplainResponse` is constructed as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed.
|
||||
<2> Called when the whole `ExplainRequest` fails.
|
||||
|
||||
[[java-rest-high-explain-response]]
|
||||
==== ExplainResponse
|
||||
|
||||
The `ExplainResponse` contains the following information:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-response]
|
||||
--------------------------------------------------
|
||||
<1> The index name of the explained document.
|
||||
<2> The type name of the explained document.
|
||||
<3> The id of the explained document.
|
||||
<4> Indicates whether or not the explained document exists.
|
||||
<5> Indicates whether or not there is a match between the explained document and
|
||||
the provided query (the `match` is retrieved from the lucene `Explanation` behind the scenes:
|
||||
if the lucene `Explanation` models a match, it returns `true`, otherwise it returns `false`).
|
||||
<6> Indicates whether or not there exists a lucene `Explanation` for this request.
|
||||
<7> Get the lucene `Explanation` object if it exists.
|
||||
<8> Get the `GetResult` object if the `_source` or the stored fields are retrieved.
|
||||
|
||||
The `GetResult` contains two maps internally to store the fetched `_source` and stored fields.
|
||||
You can use the following methods to get them:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[get-result]
|
||||
--------------------------------------------------
|
||||
<1> Retrieve the `_source` as a map.
|
||||
<2> Retrieve the specified stored fields as a map.
|
|
@ -0,0 +1,81 @@
|
|||
[[java-rest-high-multi-search-template]]
|
||||
=== Multi-Search-Template API
|
||||
|
||||
The `multiSearchTemplate` API executes multiple <<java-rest-high-search-template,`search template`>>
|
||||
requests in a single http request in parallel.
|
||||
|
||||
[[java-rest-high-multi-search-template-request]]
|
||||
==== Multi-Search-Template Request
|
||||
|
||||
The `MultiSearchTemplateRequest` is built empty and you add all of the searches that
|
||||
you wish to execute to it:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-request-inline]
|
||||
--------------------------------------------------
|
||||
<1> Create an empty `MultiSearchTemplateRequest`.
|
||||
<2> Create one or more `SearchTemplateRequest` objects and populate them just like you
|
||||
would for a regular <<java-rest-high-search-template,`search template`>>.
|
||||
<3> Add the `SearchTemplateRequest` to the `MultiSearchTemplateRequest`.
|
||||
|
||||
===== Optional arguments
|
||||
|
||||
The multiSearchTemplate's `max_concurrent_searches` request parameter can be used to control
|
||||
the maximum number of concurrent searches the multi search api will execute.
|
||||
This default is based on the number of data nodes and the default search thread pool size.
|
||||
|
||||
[[java-rest-high-multi-search-template-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
The `multiSearchTemplate` method executes `MultiSearchTemplateRequest`s synchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-request-sync]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-multi-search-template-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The `multiSearchTemplateAsync` method executes `MultiSearchTemplateRequest`s asynchronously,
|
||||
calling the provided `ActionListener` when the response is ready.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-execute-async]
|
||||
--------------------------------------------------
|
||||
The parameters are the `MultiSearchTemplateRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `MultiSearchTemplateResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed.
|
||||
<2> Called when the whole `MultiSearchTemplateRequest` fails.
|
||||
|
||||
==== MultiSearchTemplateResponse
|
||||
|
||||
The `MultiSearchTemplateResponse` that is returned by executing the `multiSearchTemplate` method contains
|
||||
a `MultiSearchTemplateResponse.Item` for each `SearchTemplateRequest` in the
|
||||
`MultiSearchTemplateRequest`. Each `MultiSearchTemplateResponse.Item` contains an
|
||||
exception in `getFailure` if the request failed or a
|
||||
<<java-rest-high-search-response,`SearchResponse`>> in `getResponse` if
|
||||
the request succeeded:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-response]
|
||||
--------------------------------------------------
|
||||
<1> An array of responses is returned - one response for each request
|
||||
<2> Failed search template requests have error messages
|
||||
<3> Successful requests contain a <<java-rest-high-search-response,`SearchResponse`>> in
|
||||
`getResponse`.
|
|
@ -0,0 +1,121 @@
|
|||
[[java-rest-high-snapshot-create-snapshot]]
|
||||
=== Create Snapshot API
|
||||
|
||||
Use the Create Snapshot API to create a new snapshot.
|
||||
|
||||
[[java-rest-high-snapshot-create-snapshot-request]]
|
||||
==== Create Snapshot Request
|
||||
|
||||
A `CreateSnapshotRequest`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request]
|
||||
--------------------------------------------------
|
||||
|
||||
==== Required Arguments
|
||||
The following arguments are mandatory:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-repositoryName]
|
||||
--------------------------------------------------
|
||||
<1> The name of the repository.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-snapshotName]
|
||||
--------------------------------------------------
|
||||
<1> The name of the snapshot.
|
||||
|
||||
==== Optional Arguments
|
||||
The following arguments are optional:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-indices]
|
||||
--------------------------------------------------
|
||||
<1> A list of indices the snapshot is applied to.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-indicesOptions]
|
||||
--------------------------------------------------
|
||||
<1> Options applied to the indices.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-partial]
|
||||
--------------------------------------------------
|
||||
<1> Set `partial` to `true` to allow a successful snapshot without the
|
||||
availability of all the indices primary shards. Defaults to `false`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-includeGlobalState]
|
||||
--------------------------------------------------
|
||||
<1> Set `includeGlobalState` to `false` to prevent writing the cluster's global
|
||||
state as part of the snapshot. Defaults to `true`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-masterTimeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to connect to the master node as a `TimeValue`.
|
||||
<2> Timeout to connect to the master node as a `String`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-waitForCompletion]
|
||||
--------------------------------------------------
|
||||
<1> Waits for the snapshot to be completed before a response is returned.
|
||||
|
||||
[[java-rest-high-snapshot-create-snapshot-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-snapshot-create-snapshot-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a create snapshot request requires both the
|
||||
`CreateSnapshotRequest` instance and an `ActionListener` instance to be
|
||||
passed as arguments to the asynchronous method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `CreateSnapshotRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes.
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `CreateSnapshotResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument.
|
||||
<2> Called in case of a failure. The raised exception is provided as an
|
||||
argument.
|
||||
|
||||
[[java-rest-high-snapshot-create-snapshot-response]]
|
||||
==== Snapshot Create Response
|
||||
|
||||
Use the `CreateSnapshotResponse` to retrieve information about the evaluated
|
||||
request:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-response]
|
||||
--------------------------------------------------
|
||||
<1> Indicates the node has started the request.
|
|
@ -0,0 +1,108 @@
|
|||
[[java-rest-high-snapshot-get-snapshots]]
|
||||
=== Get Snapshots API
|
||||
|
||||
Use the Get Snapshots API to get snapshots.
|
||||
|
||||
[[java-rest-high-snapshot-get-snapshots-request]]
|
||||
==== Get Snapshots Request
|
||||
|
||||
A `GetSnapshotsRequest`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request]
|
||||
--------------------------------------------------
|
||||
|
||||
==== Required Arguments
|
||||
The following arguments are mandatory:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-repositoryName]
|
||||
--------------------------------------------------
|
||||
<1> The name of the repository.
|
||||
|
||||
==== Optional Arguments
|
||||
The following arguments are optional:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-snapshots]
|
||||
--------------------------------------------------
|
||||
<1> An array of snapshots to get. Otherwise it will return all snapshots for a repository.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-masterTimeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to connect to the master node as a `TimeValue`.
|
||||
<2> Timeout to connect to the master node as a `String`.
|
||||
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-verbose]
|
||||
--------------------------------------------------
|
||||
<1> `Boolean` indicating if the response should be verbose.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-ignore-unavailable]
|
||||
--------------------------------------------------
|
||||
<1> `Boolean` indicating if unavailable snapshots should be ignored. Otherwise the request will
|
||||
fail if any of the snapshots are unavailable.
|
||||
|
||||
[[java-rest-high-snapshot-get-snapshots-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-snapshot-get-snapshots-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a get snapshots request requires both the
|
||||
`GetSnapshotsRequest` instance and an `ActionListener` instance to be
|
||||
passed as arguments to the asynchronous method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetSnapshotsRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes.
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `GetSnapshotsResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument.
|
||||
<2> Called in case of a failure. The raised exception is provided as an
|
||||
argument.
|
||||
|
||||
[[java-rest-high-snapshot-get-snapshots-response]]
|
||||
==== Get Snapshots Response
|
||||
|
||||
The returned `GetSnapshotsResponse` allows the retrieval of information about the requested
|
||||
snapshots:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-response]
|
||||
--------------------------------------------------
|
||||
<1> The REST status of a snapshot
|
||||
<2> The snapshot id
|
||||
<3> The current state of the snapshot
|
||||
<4> Information about failures that occurred during the shard snapshot process.
|
||||
<5> The snapshot start time
|
||||
<6> The snapshot end time
|
|
@ -32,16 +32,20 @@ The Java High Level REST Client supports the following Search APIs:
|
|||
* <<java-rest-high-search-scroll>>
|
||||
* <<java-rest-high-clear-scroll>>
|
||||
* <<java-rest-high-search-template>>
|
||||
* <<java-rest-high-multi-search-template>>
|
||||
* <<java-rest-high-multi-search>>
|
||||
* <<java-rest-high-field-caps>>
|
||||
* <<java-rest-high-rank-eval>>
|
||||
* <<java-rest-high-explain>>
|
||||
|
||||
include::search/search.asciidoc[]
|
||||
include::search/scroll.asciidoc[]
|
||||
include::search/multi-search.asciidoc[]
|
||||
include::search/search-template.asciidoc[]
|
||||
include::search/multi-search-template.asciidoc[]
|
||||
include::search/field-caps.asciidoc[]
|
||||
include::search/rank-eval.asciidoc[]
|
||||
include::search/explain.asciidoc[]
|
||||
|
||||
== Miscellaneous APIs
|
||||
|
||||
|
@ -49,9 +53,11 @@ The Java High Level REST Client supports the following Miscellaneous APIs:
|
|||
|
||||
* <<java-rest-high-main>>
|
||||
* <<java-rest-high-ping>>
|
||||
* <<java-rest-high-x-pack-info>>
|
||||
|
||||
include::miscellaneous/main.asciidoc[]
|
||||
include::miscellaneous/ping.asciidoc[]
|
||||
include::miscellaneous/x-pack-info.asciidoc[]
|
||||
|
||||
== Indices APIs
|
||||
|
||||
|
@ -74,9 +80,11 @@ Index Management::
|
|||
* <<java-rest-high-indices-put-settings>>
|
||||
* <<java-rest-high-get-settings>>
|
||||
* <<java-rest-high-indices-validate-query>>
|
||||
* <<java-rest-high-get-index>>
|
||||
|
||||
Mapping Management::
|
||||
* <<java-rest-high-put-mapping>>
|
||||
* <<java-rest-high-get-mappings>>
|
||||
* <<java-rest-high-get-field-mappings>>
|
||||
|
||||
Alias Management::
|
||||
|
@ -84,6 +92,7 @@ Alias Management::
|
|||
* <<java-rest-high-exists-alias>>
|
||||
* <<java-rest-high-get-alias>>
|
||||
|
||||
include::indices/analyze.asciidoc[]
|
||||
include::indices/create_index.asciidoc[]
|
||||
include::indices/delete_index.asciidoc[]
|
||||
include::indices/indices_exists.asciidoc[]
|
||||
|
@ -108,15 +117,18 @@ include::indices/get_settings.asciidoc[]
|
|||
include::indices/put_template.asciidoc[]
|
||||
include::indices/validate_query.asciidoc[]
|
||||
include::indices/get_templates.asciidoc[]
|
||||
include::indices/get_index.asciidoc[]
|
||||
|
||||
== Cluster APIs
|
||||
|
||||
The Java High Level REST Client supports the following Cluster APIs:
|
||||
|
||||
* <<java-rest-high-cluster-put-settings>>
|
||||
* <<java-rest-high-cluster-get-settings>>
|
||||
* <<java-rest-high-cluster-health>>
|
||||
|
||||
include::cluster/put_settings.asciidoc[]
|
||||
include::cluster/get_settings.asciidoc[]
|
||||
include::cluster/health.asciidoc[]
|
||||
|
||||
== Ingest APIs
|
||||
|
@ -140,12 +152,16 @@ The Java High Level REST Client supports the following Snapshot APIs:
|
|||
* <<java-rest-high-snapshot-create-repository>>
|
||||
* <<java-rest-high-snapshot-delete-repository>>
|
||||
* <<java-rest-high-snapshot-verify-repository>>
|
||||
* <<java-rest-high-snapshot-create-snapshot>>
|
||||
* <<java-rest-high-snapshot-get-snapshots>>
|
||||
* <<java-rest-high-snapshot-delete-snapshot>>
|
||||
|
||||
include::snapshot/get_repository.asciidoc[]
|
||||
include::snapshot/create_repository.asciidoc[]
|
||||
include::snapshot/delete_repository.asciidoc[]
|
||||
include::snapshot/verify_repository.asciidoc[]
|
||||
include::snapshot/create_snapshot.asciidoc[]
|
||||
include::snapshot/get_snapshots.asciidoc[]
|
||||
include::snapshot/delete_snapshot.asciidoc[]
|
||||
|
||||
== Tasks APIs
|
||||
|
@ -167,4 +183,3 @@ The Java High Level REST Client supports the following Scripts APIs:
|
|||
|
||||
include::script/get_script.asciidoc[]
|
||||
include::script/delete_script.asciidoc[]
|
||||
|
||||
|
|
|
@ -7,4 +7,6 @@ include::painless-getting-started.asciidoc[]
|
|||
|
||||
include::painless-lang-spec.asciidoc[]
|
||||
|
||||
include::painless-api-reference.asciidoc[]
|
||||
include::painless-contexts.asciidoc[]
|
||||
|
||||
include::painless-api-reference.asciidoc[]
|
|
@ -0,0 +1,58 @@
|
|||
[[painless-contexts]]
|
||||
== Painless contexts
|
||||
|
||||
:es_version: https://www.elastic.co/guide/en/elasticsearch/reference/master
|
||||
:xp_version: https://www.elastic.co/guide/en/x-pack/current
|
||||
|
||||
A Painless script is evaluated within a context. Each context has values that
|
||||
are available as local variables, a whitelist that controls the available
|
||||
classes, and the methods and fields within those classes (API), and
|
||||
whether a value is returned and, if so, its type.
|
||||
|
||||
A Painless script is typically executed within one of the contexts in the table
|
||||
below. Note this is not necessarily a comprehensive list as custom plugins and
|
||||
specialized code may define new ways to use a Painless script.
|
||||
|
||||
[options="header",cols="<1,<1,<1"]
|
||||
|====
|
||||
| Name | Painless Documentation
|
||||
| Elasticsearch Documentation
|
||||
| Update | <<painless-update-context, Painless Documentation>>
|
||||
| {es_version}/docs-update.html[Elasticsearch Documentation]
|
||||
| Update by query | <<painless-update-by-query-context, Painless Documentation>>
|
||||
| {es_version}/docs-update-by-query.html[Elasticsearch Documentation]
|
||||
| Reindex | <<painless-reindex-context, Painless Documentation>>
|
||||
| {es_version}/docs-reindex.html[Elasticsearch Documentation]
|
||||
| Sort | <<painless-sort-context, Painless Documentation>>
|
||||
| {es_version}/search-request-sort.html[Elasticsearch Documentation]
|
||||
| Similarity | <<painless-similarity-context, Painless Documentation>>
|
||||
| {es_version}/index-modules-similarity.html[Elasticsearch Documentation]
|
||||
| Weight | <<painless-weight-context, Painless Documentation>>
|
||||
| {es_version}/index-modules-similarity.html[Elasticsearch Documentation]
|
||||
| Score | <<painless-score-context, Painless Documentation>>
|
||||
| {es_version}/query-dsl-function-score-query.html[Elasticsearch Documentation]
|
||||
| Field | <<painless-field-context, Painless Documentation>>
|
||||
| {es_version}/search-request-script-fields.html[Elasticsearch Documentation]
|
||||
| Filter | <<painless-filter-context, Painless Documentation>>
|
||||
| {es_version}/query-dsl-script-query.html[Elasticsearch Documentation]
|
||||
| Minimum should match | <<painless-min-should-match-context, Painless Documentation>>
|
||||
| {es_version}/query-dsl-terms-set-query.html[Elasticsearch Documentation]
|
||||
| Metric aggregation initialization | <<painless-metric-agg-init-context, Painless Documentation>>
|
||||
| {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
|
||||
| Metric aggregation map | <<painless-metric-agg-map-context, Painless Documentation>>
|
||||
| {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
|
||||
| Metric aggregation combine | <<painless-metric-agg-combine-context, Painless Documentation>>
|
||||
| {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
|
||||
| Metric aggregation reduce | <<painless-metric-agg-reduce-context, Painless Documentation>>
|
||||
| {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
|
||||
| Bucket aggregation | <<painless-bucket-agg-context, Painless Documentation>>
|
||||
| {es_version}/search-aggregations-pipeline-bucket-script-aggregation.html[Elasticsearch Documentation]
|
||||
| Ingest processor | <<painless-ingest-processor-context, Painless Documentation>>
|
||||
| {es_version}/script-processor.html[Elasticsearch Documentation]
|
||||
| Watcher condition | <<painless-watcher-condition-context, Painless Documentation>>
|
||||
| {xp_version}/condition-script.html[Elasticsearch Documentation]
|
||||
| Watcher transform | <<painless-watcher-transform-context, Painless Documentation>>
|
||||
| {xp_version}/transform-script.html[Elasticsearch Documentation]
|
||||
|====
|
||||
|
||||
include::painless-contexts/index.asciidoc[]
|
|
@ -0,0 +1,35 @@
|
|||
include::painless-update-context.asciidoc[]
|
||||
|
||||
include::painless-update-by-query-context.asciidoc[]
|
||||
|
||||
include::painless-reindex-context.asciidoc[]
|
||||
|
||||
include::painless-sort-context.asciidoc[]
|
||||
|
||||
include::painless-similarity-context.asciidoc[]
|
||||
|
||||
include::painless-weight-context.asciidoc[]
|
||||
|
||||
include::painless-score-context.asciidoc[]
|
||||
|
||||
include::painless-field-context.asciidoc[]
|
||||
|
||||
include::painless-filter-context.asciidoc[]
|
||||
|
||||
include::painless-min-should-match-context.asciidoc[]
|
||||
|
||||
include::painless-metric-agg-init-context.asciidoc[]
|
||||
|
||||
include::painless-metric-agg-map-context.asciidoc[]
|
||||
|
||||
include::painless-metric-agg-combine-context.asciidoc[]
|
||||
|
||||
include::painless-metric-agg-reduce-context.asciidoc[]
|
||||
|
||||
include::painless-bucket-agg-context.asciidoc[]
|
||||
|
||||
include::painless-ingest-processor-context.asciidoc[]
|
||||
|
||||
include::painless-watcher-condition-context.asciidoc[]
|
||||
|
||||
include::painless-watcher-transform-context.asciidoc[]
|
|
@ -0,0 +1,21 @@
|
|||
[[painless-bucket-agg-context]]
|
||||
=== Bucket aggregation context
|
||||
|
||||
Use a Painless script in an
|
||||
{es_version}/search-aggregations-pipeline-bucket-script-aggregation.html[bucket aggregation]
|
||||
to calculate a value as a result in a bucket.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query. The parameters
|
||||
include values defined as part of the `buckets_path`.
|
||||
|
||||
*Return*
|
||||
|
||||
numeric::
|
||||
The calculated value as the result.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,31 @@
|
|||
[[painless-field-context]]
|
||||
=== Field context
|
||||
|
||||
Use a Painless script to create a
|
||||
{es_version}/search-request-script-fields.html[script field] to return
|
||||
a customized value for each document in the results of a query.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`doc` (`Map`, read-only)::
|
||||
Contains the fields of the specified document where each field is a
|
||||
`List` of values.
|
||||
|
||||
{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
|
||||
Contains extracted JSON in a `Map` and `List` structure for the fields
|
||||
existing in a stored document.
|
||||
|
||||
`_score` (`double`, read-only)::
|
||||
The original score of the specified document.
|
||||
|
||||
*Return*
|
||||
|
||||
`Object`::
|
||||
The customized value for each document.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,26 @@
|
|||
[[painless-filter-context]]
|
||||
=== Filter context
|
||||
|
||||
Use a Painless script as a {es_version}/query-dsl-script-query.html[filter] in a
|
||||
query to include and exclude documents.
|
||||
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`doc` (`Map`, read-only)::
|
||||
Contains the fields of the current document where each field is a
|
||||
`List` of values.
|
||||
|
||||
*Return*
|
||||
|
||||
`boolean`::
|
||||
Return `true` if the current document should be returned as a result of
|
||||
the query, and `false` otherwise.
|
||||
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,41 @@
|
|||
[[painless-ingest-processor-context]]
|
||||
=== Ingest processor context
|
||||
|
||||
Use a Painless script in an {es_version}/script-processor.html[ingest processor]
|
||||
to modify documents upon insertion.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`)::
|
||||
The name of the index.
|
||||
|
||||
{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`)::
|
||||
The type of document within an index.
|
||||
|
||||
`ctx` (`Map`)::
|
||||
Contains extracted JSON in a `Map` and `List` structure for the fields
|
||||
that are part of the document.
|
||||
|
||||
*Side Effects*
|
||||
|
||||
{es_version}/mapping-index-field.html[`ctx['_index']`]::
|
||||
Modify this to change the destination index for the current document.
|
||||
|
||||
{es_version}/mapping-type-field.html[`ctx['_type']`]::
|
||||
Modify this to change the type for the current document.
|
||||
|
||||
`ctx` (`Map`)::
|
||||
Modify the values in the `Map/List` structure to add, modify, or delete
|
||||
the fields of a document.
|
||||
|
||||
*Return*
|
||||
|
||||
`void`::
|
||||
No expected return value.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,27 @@
|
|||
[[painless-metric-agg-combine-context]]
|
||||
=== Metric aggregation combine context
|
||||
|
||||
Use a Painless script to
|
||||
{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[combine]
|
||||
values for use in a scripted metric aggregation. A combine script is run once
|
||||
per shard following a <<painless-metric-agg-map-context, map script>> and is
|
||||
optional as part of a full metric aggregation.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`params['_agg']` (`Map`)::
|
||||
`Map` with values available from the prior map script.
|
||||
|
||||
*Return*
|
||||
|
||||
`List`, `Map`, `String`, or primitive::
|
||||
A value collected for use in a
|
||||
<<painless-metric-agg-reduce-context, reduce script>>. If no reduce
|
||||
script is specified, the value is used as part of the result.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,32 @@
|
|||
[[painless-metric-agg-init-context]]
|
||||
=== Metric aggregation initialization context
|
||||
|
||||
Use a Painless script to
|
||||
{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[initialize]
|
||||
values for use in a scripted metric aggregation. An initialization script is
|
||||
run prior to document collection once per shard and is optional as part of the
|
||||
full metric aggregation.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`params['_agg']` (`Map`)::
|
||||
Empty `Map` used to add values for use in a
|
||||
<<painless-metric-agg-map-context, map script>>.
|
||||
|
||||
*Side Effects*
|
||||
|
||||
`params['_agg']` (`Map`)::
|
||||
Add values to this `Map` for use in a map script. Additional values must
|
||||
be of the type `Map`, `List`, `String` or primitive.
|
||||
|
||||
*Return*
|
||||
|
||||
`void`::
|
||||
No expected return value.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,47 @@
|
|||
[[painless-metric-agg-map-context]]
|
||||
=== Metric aggregation map context
|
||||
|
||||
Use a Painless script to
|
||||
{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[map]
|
||||
values for use in a scripted metric aggregation. A map script is run once per
|
||||
collected document following an optional
|
||||
<<painless-metric-agg-init-context, initialization script>> and is required as
|
||||
part of a full metric aggregation.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`params['_agg']` (`Map`)::
|
||||
`Map` used to add values for processing in a
|
||||
<<painless-metric-agg-combine-context, combine script>> or returned
|
||||
directly.
|
||||
|
||||
`doc` (`Map`, read-only)::
|
||||
Contains the fields of the current document where each field is a
|
||||
`List` of values.
|
||||
|
||||
`_score` (`double`, read-only)::
|
||||
The similarity score of the current document.
|
||||
|
||||
*Side Effects*
|
||||
|
||||
`params['_agg']` (`Map`)::
|
||||
Use this `Map` to add values for processing in a combine script.
|
||||
Additional values must be of the type `Map`, `List`, `String` or
|
||||
primitive. If an initialization script is provided as part the
|
||||
aggregation then values added from the initialization script are
|
||||
available as well. If no combine script is specified, values must be
|
||||
directly stored in `_agg`. If no combine script and no
|
||||
<<painless-metric-agg-reduce-context, reduce script>> are specified, the
|
||||
values are used as the result.
|
||||
|
||||
*Return*
|
||||
|
||||
`void`::
|
||||
No expected return value.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,28 @@
|
|||
[[painless-metric-agg-reduce-context]]
|
||||
=== Metric aggregation reduce context
|
||||
|
||||
Use a Painless script to
|
||||
{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[reduce]
|
||||
values to produce the result of a scripted metric aggregation. A reduce script
|
||||
is run once on the coordinating node following a
|
||||
<<painless-metric-agg-combine-context, combine script>> (or a
|
||||
<<painless-metric-agg-map-context, map script>> if no combine script is
|
||||
specified) and is optional as part of a full metric aggregation.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`params['_aggs']` (`Map`)::
|
||||
`Map` with values available from the prior combine script (or a map
|
||||
script if no combine script is specified).
|
||||
|
||||
*Return*
|
||||
|
||||
`List`, `Map`, `String`, or primitive::
|
||||
A value used as the result.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,28 @@
|
|||
[[painless-min-should-match-context]]
|
||||
=== Minimum should match context
|
||||
|
||||
Use a Painless script to specify the
|
||||
{es_version}/query-dsl-terms-set-query.html[minimum] number of terms that a
|
||||
specified field needs to match with for a document to be part of the query
|
||||
results.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`params['num_terms']` (`int`, read-only)::
|
||||
The number of terms specified to match with.
|
||||
|
||||
`doc` (`Map`, read-only)::
|
||||
Contains the fields of the current document where each field is a
|
||||
`List` of values.
|
||||
|
||||
*Return*
|
||||
|
||||
`int`::
|
||||
The minimum number of terms required to match the current document.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,68 @@
|
|||
[[painless-reindex-context]]
|
||||
=== Reindex context
|
||||
|
||||
Use a Painless script in a {es_version}/docs-reindex.html[reindex] operation to
|
||||
add, modify, or delete fields within each document in an original index as it is
|
||||
reindexed into a target index.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`ctx['_op']` (`String`)::
|
||||
The name of the operation.
|
||||
|
||||
{es_version}/mapping-routing-field.html[`ctx['_routing']`] (`String`)::
|
||||
The value used to select a shard for document storage.
|
||||
|
||||
{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`)::
|
||||
The name of the index.
|
||||
|
||||
{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`)::
|
||||
The type of document within an index.
|
||||
|
||||
{es_version}/mapping-id-field.html[`ctx['_id']`] (`int`)::
|
||||
The unique document id.
|
||||
|
||||
`ctx['_version']` (`int`)::
|
||||
The current version of the document.
|
||||
|
||||
{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
|
||||
Contains extracted JSON in a `Map` and `List` structure for the fields
|
||||
existing in a stored document.
|
||||
|
||||
*Side Effects*
|
||||
|
||||
`ctx['_op']`::
|
||||
Use the default of `index` to update a document. Set to `none` to
|
||||
specify no operation or `delete` to delete the current document from
|
||||
the index.
|
||||
|
||||
{es_version}/mapping-routing-field.html[`ctx['_routing']`]::
|
||||
Modify this to change the routing value for the current document.
|
||||
|
||||
{es_version}/mapping-index-field.html[`ctx['_index']`]::
|
||||
Modify this to change the destination index for the current document.
|
||||
|
||||
{es_version}/mapping-type-field.html[`ctx['_type']`]::
|
||||
Modify this to change the type for the current document.
|
||||
|
||||
{es_version}/mapping-id-field.html[`ctx['_id']`]::
|
||||
Modify this to change the id for the current document.
|
||||
|
||||
`ctx['_version']` (`int`)::
|
||||
Modify this to modify the version for the current document.
|
||||
|
||||
{es_version}/mapping-source-field.html[`ctx['_source']`]::
|
||||
Modify the values in the `Map/List` structure to add, modify, or delete
|
||||
the fields of a document.
|
||||
|
||||
*Return*
|
||||
|
||||
`void`::
|
||||
No expected return value.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,27 @@
|
|||
[[painless-score-context]]
|
||||
=== Score context
|
||||
|
||||
Use a Painless script in a
|
||||
{es_version}/query-dsl-function-score-query.html[function score] to apply a new
|
||||
score to documents returned from a query.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`doc` (`Map`, read-only)::
|
||||
Contains the fields of the current document where each field is a
|
||||
`List` of values.
|
||||
|
||||
`_score` (`double`, read-only)::
|
||||
The similarity score of the current document.
|
||||
|
||||
*Return*
|
||||
|
||||
`double`::
|
||||
The score for the current document.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,53 @@
|
|||
[[painless-similarity-context]]
|
||||
=== Similarity context
|
||||
|
||||
Use a Painless script to create a
|
||||
{es_version}/index-modules-similarity.html[similarity] equation for scoring
|
||||
documents in a query.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in at query-time.
|
||||
|
||||
`query.boost` (`float`, read-only)::
|
||||
The boost value if provided by the query. If this is not provided the
|
||||
value is `1.0f`.
|
||||
|
||||
`field.docCount` (`long`, read-only)::
|
||||
The number of documents that have a value for the current field.
|
||||
|
||||
`field.sumDocFreq` (`long`, read-only)::
|
||||
The sum of all terms that exist for the current field. If this is not
|
||||
available the value is `-1`.
|
||||
|
||||
`field.sumTotalTermFreq` (`long`, read-only)::
|
||||
The sum of occurrences in the index for all the terms that exist in the
|
||||
current field. If this is not available the value is `-1`.
|
||||
|
||||
`term.docFreq` (`long`, read-only)::
|
||||
The number of documents that contain the current term in the index.
|
||||
|
||||
`term.totalTermFreq` (`long`, read-only)::
|
||||
The total occurrences of the current term in the index.
|
||||
|
||||
`doc.length` (`long`, read-only)::
|
||||
The number of tokens the current document has in the current field.
|
||||
|
||||
`doc.freq` (`long`, read-only)::
|
||||
The number of occurrences of the current term in the current
|
||||
document for the current field.
|
||||
|
||||
*Return*
|
||||
|
||||
`double`::
|
||||
The similarity score for the current document.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
|
@ -0,0 +1,26 @@
|
|||
[[painless-sort-context]]
|
||||
=== Sort context
|
||||
|
||||
Use a Painless script to
|
||||
{es_version}/search-request-sort.html[sort] the documents in a query.
|
||||
|
||||
*Variables*
|
||||
|
||||
`params` (`Map`, read-only)::
|
||||
User-defined parameters passed in as part of the query.
|
||||
|
||||
`doc` (`Map`, read-only)::
|
||||
Contains the fields of the current document where each field is a
|
||||
`List` of values.
|
||||
|
||||
`_score` (`double`, read-only)::
|
||||
The similarity score of the current document.
|
||||
|
||||
*Return*
|
||||
|
||||
`double`::
|
||||
The score for the specified document.
|
||||
|
||||
*API*
|
||||
|
||||
The standard <<painless-api-reference, Painless API>> is available.
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue