Merge branch 'master' into ccr
* master:
  Docs: Remove duplicate test setup
  Print output when the name checker IT fails (#31660)
  Fix syntax errors in get-snapshots docs (#31656)
  Docs: Fix description of percentile ranks example example (#31652)
  Add MultiSearchTemplate support to High Level Rest client (#30836)
  Add test for low-level client round-robin behaviour (#31616)
  SQL: Refactor package names of sql-proto and sql-shared-proto projects (#31622)
  Remove deprecation warnings to prepare for Gradle 5 (sourceSets.main.output.classesDirs) (#30389)
  Correct integTest enable logic (#31646)
  Fix missing get-snapshots docs reference #31645
  Do not check for Azure container existence (#31617)
  Merge AwsS3Service and InternalAwsS3Service in a S3Service class (#31580)
  Upgrade gradle wrapper to 4.8 (#31525)
  Only set vm.max_map_count if greater than default (#31512)
  Add Get Snapshots High Level REST API (#31537)
  QA: Merge query-builder-bwc to restart test (#30979)
  Update reindex.asciidoc (#31626)
  Docs: Skip xpack snippet tests if no xpack (#31619)
  mute CreateSnapshotRequestTests
  HLRest: Fix test for explain API
  [TEST] Fix RemoteClusterConnectionTests
  Add Create Snapshot to High-Level Rest Client (#31215)
  Remove legacy MetaDataStateFormat (#31603)
  Add explain API to high-level REST client (#31387)
  Preserve thread context when connecting to remote cluster (#31574)
  Unify headers for full text queries
  Remove redundant 'minimum_should_match'
  JDBC driver prepared statement set* methods (#31494)
  [TEST] call yaml client close method from test suite (#31591)
commit 2c10cd3e1d
@@ -30,7 +30,7 @@ buildscript {

apply plugin: 'elasticsearch.build'

// order of this seciont matters, see: https://github.com/johnrengelman/shadow/issues/336
// order of this section matters, see: https://github.com/johnrengelman/shadow/issues/336
apply plugin: 'application' // have the shadow plugin provide the runShadow task
mainClassName = 'org.openjdk.jmh.Main'
apply plugin: 'com.github.johnrengelman.shadow' // build an uberjar with all benchmarks
build.gradle
@@ -326,6 +326,9 @@ gradle.projectsEvaluated {
// :test:framework:test cannot run before and after :server:test
return
}
if (tasks.findByPath('test') != null && tasks.findByPath('integTest') != null) {
integTest.mustRunAfter test
}
configurations.all { Configuration configuration ->
/*
* The featureAwarePlugin configuration has a dependency on x-pack:plugin:core and x-pack:plugin:core has a dependency on the
@@ -575,3 +578,28 @@ gradle.projectsEvaluated {
}
}
}

if (System.properties.get("build.compare") != null) {
apply plugin: 'compare-gradle-builds'
compareGradleBuilds {
ext.referenceProject = System.properties.get("build.compare")
doFirst {
if (file(referenceProject).exists() == false) {
throw new GradleException(
"Use git worktree to check out a version to compare against to ../elasticsearch_build_reference"
)
}
}
sourceBuild {
gradleVersion = "4.7" // does not default to gradle wrapper of project dir, but current version
projectDir = referenceProject
tasks = ["clean", "assemble"]
arguments = ["-Dbuild.compare_friendly=true"]
}
targetBuild {
tasks = ["clean", "assemble"]
// use -Dorg.gradle.java.home= to alter jdk versions
arguments = ["-Dbuild.compare_friendly=true"]
}
}
}
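A note on driving the comparison above (hypothetical invocation; the reference path is only the convention suggested by the error message): the block stays dormant until the build.compare system property is set.

    // hypothetical: set up the reference checkout and run the comparison
    //   git worktree add ../elasticsearch_build_reference master
    //   gradle compareGradleBuilds -Dbuild.compare=../elasticsearch_build_reference
    // inside the build script the property then resolves as:
    assert System.getProperty('build.compare') == '../elasticsearch_build_reference'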
@@ -17,11 +17,13 @@
* under the License.
*/

import java.nio.file.Files

import org.gradle.util.GradleVersion

apply plugin: 'groovy'
plugins {
id 'java-gradle-plugin'
id 'groovy'
}

group = 'org.elasticsearch.gradle'
@@ -83,9 +85,10 @@ repositories {
}

dependencies {
compile gradleApi()
compile localGroovy()
compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}"
compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"

compile("junit:junit:${props.getProperty('junit')}") {
transitive = false
}
@@ -97,8 +100,10 @@ dependencies {
compile 'de.thetaphi:forbiddenapis:2.5'
compile 'org.apache.rat:apache-rat:0.11'
compile "org.elasticsearch:jna:4.5.1"
testCompile "junit:junit:${props.getProperty('junit')}"
}


// Gradle 2.14+ removed ProgressLogger(-Factory) classes from the public APIs
// Use logging dependency instead
// Gradle 4.3.1 stopped releasing the logging jars to jcenter, just use the last available one
@@ -106,7 +111,6 @@ GradleVersion logVersion = GradleVersion.current() > GradleVersion.version('4.3'

dependencies {
compileOnly "org.gradle:gradle-logging:${logVersion.getVersion()}"
compile 'ru.vyarus:gradle-animalsniffer-plugin:1.2.0' // Gradle 2.14 requires a version > 1.0.1
}

/*****************************************************************************
@@ -114,14 +118,12 @@ dependencies {
*****************************************************************************/
// this will only happen when buildSrc is built on its own during build init
if (project == rootProject) {

repositories {
if (System.getProperty("repos.mavenLocal") != null) {
mavenLocal()
}
mavenCentral()
}
test.exclude 'org/elasticsearch/test/NamingConventionsCheckBadClasses*'
}

/*****************************************************************************
@@ -146,9 +148,6 @@ if (project != rootProject) {
jarHell.enabled = false
thirdPartyAudit.enabled = false

// test for elasticsearch.build tries to run with ES...
test.enabled = false

// TODO: re-enable once randomizedtesting gradle code is published and removed from here
licenseHeaders.enabled = false
@@ -159,14 +158,7 @@ if (project != rootProject) {
}

namingConventions {
testClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$UnitTestCase'
integTestClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$IntegTestCase'
testClass = 'org.elasticsearch.gradle.test.GradleUnitTestCase'
integTestClass = 'org.elasticsearch.gradle.test.GradleIntegrationTestCase'
}

task namingConventionsMain(type: org.elasticsearch.gradle.precommit.NamingConventionsTask) {
checkForTestsInMain = true
testClass = namingConventions.testClass
integTestClass = namingConventions.integTestClass
}
precommit.dependsOn namingConventionsMain
}
@@ -1,20 +1,44 @@
package com.carrotsearch.gradle.junit4

import com.carrotsearch.ant.tasks.junit4.JUnit4
import org.gradle.api.AntBuilder
import org.gradle.api.GradleException
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.UnknownTaskException
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.TaskContainer
import org.gradle.api.tasks.TaskProvider
import org.gradle.api.tasks.testing.Test

import java.util.concurrent.atomic.AtomicBoolean

class RandomizedTestingPlugin implements Plugin<Project> {

static private AtomicBoolean sanityCheckConfigured = new AtomicBoolean(false)

void apply(Project project) {
setupSeed(project)
replaceTestTask(project.tasks)
configureAnt(project.ant)
configureSanityCheck(project)
}

private static void configureSanityCheck(Project project) {
// Check the task graph to confirm tasks were indeed replaced
// https://github.com/elastic/elasticsearch/issues/31324
if (sanityCheckConfigured.getAndSet(true) == false) {
project.rootProject.getGradle().getTaskGraph().whenReady {
List<Task> nonConforming = project.getGradle().getTaskGraph().allTasks
.findAll { it.name == "test" }
.findAll { (it instanceof RandomizedTestingTask) == false}
.collect { "${it.path} -> ${it.class}" }
if (nonConforming.isEmpty() == false) {
throw new GradleException("Found the ${nonConforming.size()} `test` tasks:" +
"\n  ${nonConforming.join("\n  ")}")
}
}
}
}

/**
@@ -45,29 +69,32 @@ class RandomizedTestingPlugin implements Plugin<Project> {
}

static void replaceTestTask(TaskContainer tasks) {
Test oldTestTask = tasks.findByPath('test')
if (oldTestTask == null) {
// Gradle 4.8 introduced lazy tasks, thus we deal both with the `test` task as well as its provider
// https://github.com/gradle/gradle/issues/5730#issuecomment-398822153
// since we can't be sure if the task was ever realized, we remove both the provider and the task
TaskProvider<Test> oldTestProvider
try {
oldTestProvider = tasks.getByNameLater(Test, 'test')
} catch (UnknownTaskException unused) {
// no test task, ok, user will use testing task on their own
return
}
tasks.remove(oldTestTask)
Test oldTestTask = oldTestProvider.get()

Map properties = [
name: 'test',
type: RandomizedTestingTask,
dependsOn: oldTestTask.dependsOn,
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Runs unit tests with the randomized testing framework'
]
RandomizedTestingTask newTestTask = tasks.create(properties)
newTestTask.classpath = oldTestTask.classpath
newTestTask.testClassesDir = oldTestTask.project.sourceSets.test.output.classesDir
// since gradle 4.5, tasks immutable dependencies are "hidden" (do not show up in dependsOn)
// so we must explicitly add a dependency on generating the test classpath
newTestTask.dependsOn('testClasses')
// we still have to use replace here despite the remove above because the task container knows about the provider
// by the same name
RandomizedTestingTask newTestTask = tasks.replace('test', RandomizedTestingTask)
newTestTask.configure{
group = JavaBasePlugin.VERIFICATION_GROUP
description = 'Runs unit tests with the randomized testing framework'
dependsOn oldTestTask.dependsOn, 'testClasses'
classpath = oldTestTask.classpath
testClassesDirs = oldTestTask.project.sourceSets.test.output.classesDirs
}

// hack so check task depends on custom test
Task checkTask = tasks.findByPath('check')
Task checkTask = tasks.getByName('check')
checkTask.dependsOn.remove(oldTestProvider)
checkTask.dependsOn.remove(oldTestTask)
checkTask.dependsOn.add(newTestTask)
}
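For context on the lazy-task comments above, a minimal sketch (not part of this commit) of the Gradle 4.8 provider API the plugin now has to account for:

    // Registering lazily yields a TaskProvider; no Test instance exists yet.
    TaskProvider<Test> lazyTest = tasks.createLater('test', Test)
    lazyTest.configure { useJUnit() }   // configuration is deferred, still not realized
    Test realized = lazyTest.get()      // only now is the task object created

This is why the plugin cannot simply findByPath('test') anymore: the task may exist only as an unrealized provider, so both the provider and any realized task have to be handled.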
@@ -6,18 +6,20 @@ import groovy.xml.NamespaceBuilder
import groovy.xml.NamespaceBuilderSupport
import org.apache.tools.ant.BuildException
import org.apache.tools.ant.DefaultLogger
import org.apache.tools.ant.Project
import org.apache.tools.ant.RuntimeConfigurable
import org.apache.tools.ant.UnknownElement
import org.elasticsearch.gradle.BuildPlugin
import org.gradle.api.DefaultTask
import org.gradle.api.InvalidUserDataException
import org.gradle.api.file.FileCollection
import org.gradle.api.file.FileTreeElement
import org.gradle.api.internal.tasks.options.Option
import org.gradle.api.specs.Spec
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputDirectory
import org.gradle.api.tasks.Optional
import org.gradle.api.tasks.TaskAction
import org.gradle.api.tasks.options.Option
import org.gradle.api.tasks.util.PatternFilterable
import org.gradle.api.tasks.util.PatternSet
import org.gradle.internal.logging.progress.ProgressLoggerFactory
@@ -43,8 +45,8 @@ class RandomizedTestingTask extends DefaultTask {
@Input
String parallelism = '1'

@InputDirectory
File testClassesDir
@Input
FileCollection testClassesDirs

@Optional
@Input
@@ -220,7 +222,7 @@ class RandomizedTestingTask extends DefaultTask {
listener = new DefaultLogger(
errorPrintStream: System.err,
outputPrintStream: System.out,
messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
messageOutputLevel: Project.MSG_INFO)
} else {
// we want to buffer the info, and emit it if the test fails
antLoggingBuffer = new ByteArrayOutputStream()
@@ -228,7 +230,7 @@ class RandomizedTestingTask extends DefaultTask {
listener = new DefaultLogger(
errorPrintStream: stream,
outputPrintStream: stream,
messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
messageOutputLevel: Project.MSG_INFO)
}
project.ant.project.addBuildListener(listener)
}
@@ -251,12 +253,10 @@ class RandomizedTestingTask extends DefaultTask {
if (argLine != null) {
jvmarg(line: argLine)
}
fileset(dir: testClassesDir) {
for (String includePattern : patternSet.getIncludes()) {
include(name: includePattern)
}
for (String excludePattern : patternSet.getExcludes()) {
exclude(name: excludePattern)
testClassesDirs.each { testClassDir ->
fileset(dir: testClassDir) {
patternSet.getIncludes().each { include(name: it) }
patternSet.getExcludes().each { exclude(name: it) }
}
}
for (Map.Entry<String, Object> prop : systemProperties) {
@@ -348,7 +348,9 @@ class BuildPlugin implements Plugin<Project> {
// just a self contained test-fixture configuration, likely transitive and hellacious
return
}
configuration.resolutionStrategy.failOnVersionConflict()
configuration.resolutionStrategy {
failOnVersionConflict()
}
})

// force all dependencies added directly to compile/testCompile to be non-transitive, except for ES itself
@@ -475,13 +477,17 @@ class BuildPlugin implements Plugin<Project> {
}
}

project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom t ->
// place the pom next to the jar it is for
t.destination = new File(project.buildDir, "distributions/${project.archivesBaseName}-${project.version}.pom")
// build poms with assemble (if the assemble task exists)
Task assemble = project.tasks.findByName('assemble')
if (assemble) {
assemble.dependsOn(t)
// Work around Gradle 4.8 issue until we `enableFeaturePreview('STABLE_PUBLISHING')`
// https://github.com/gradle/gradle/issues/5696#issuecomment-396965185
project.getGradle().getTaskGraph().whenReady {
project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom t ->
// place the pom next to the jar it is for
t.destination = new File(project.buildDir, "distributions/${project.archivesBaseName}-${project.version}.pom")
// build poms with assemble (if the assemble task exists)
Task assemble = project.tasks.findByName('assemble')
if (assemble) {
assemble.dependsOn(t)
}
}
}
}
@@ -625,6 +631,10 @@ class BuildPlugin implements Plugin<Project> {
jarTask.manifest.attributes('Change': shortHash)
}
}
// Force manifest entries that change by nature to a constant to be able to compare builds more effectively
if (System.properties.getProperty("build.compare_friendly", "false") == "true") {
jarTask.manifest.getAttributes().clear()
}
}
// add license/notice files
project.afterEvaluate {
@@ -741,7 +751,7 @@ class BuildPlugin implements Plugin<Project> {
project.extensions.add('additionalTest', { String name, Closure config ->
RandomizedTestingTask additionalTest = project.tasks.create(name, RandomizedTestingTask.class)
additionalTest.classpath = test.classpath
additionalTest.testClassesDir = test.testClassesDir
additionalTest.testClassesDirs = test.testClassesDirs
additionalTest.configure(commonTestConfig(project))
additionalTest.configure(config)
additionalTest.dependsOn(project.tasks.testClasses)
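The deferred pom configuration above should become unnecessary once the feature preview named in the comment is enabled; presumably (not done in this commit) that would be a one-liner in settings.gradle:

    // settings.gradle -- opt in to Gradle 4.8's stable publishing behaviour,
    // after which GenerateMavenPom configuration no longer needs to wait for the task graph
    enableFeaturePreview('STABLE_PUBLISHING')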
@@ -1,45 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.gradle

import org.gradle.api.GradleException
import org.gradle.api.tasks.Exec

/**
* A wrapper around gradle's Exec task to capture output and log on error.
*/
class LoggedExec extends Exec {

protected ByteArrayOutputStream output = new ByteArrayOutputStream()

LoggedExec() {
if (logger.isInfoEnabled() == false) {
standardOutput = output
errorOutput = output
ignoreExitValue = true
doLast {
if (execResult.exitValue != 0) {
output.toString('UTF-8').eachLine { line -> logger.error(line) }
throw new GradleException("Process '${executable} ${args.join(' ')}' finished with non-zero exit value ${execResult.exitValue}")
}
}
}
}
}
@@ -0,0 +1,41 @@
package org.elasticsearch.gradle;

import groovy.lang.Closure;
import org.gradle.api.GradleException;
import org.gradle.api.Task;
import org.gradle.api.tasks.Exec;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.stream.Collectors;

/**
* A wrapper around gradle's Exec task to capture output and log on error.
*/
public class LoggedExec extends Exec {

protected ByteArrayOutputStream output = new ByteArrayOutputStream();

public LoggedExec() {
if (getLogger().isInfoEnabled() == false) {
setStandardOutput(output);
setErrorOutput(output);
setIgnoreExitValue(true);
doLast(new Closure<Void>(this, this) {
public void doCall(Task it) throws IOException {
if (getExecResult().getExitValue() != 0) {
for (String line : output.toString("UTF-8").split("\\R")) {
getLogger().error(line);
}
throw new GradleException(
"Process '" + getExecutable() + " " +
getArgs().stream().collect(Collectors.joining(" ")) +
"' finished with non-zero exit value " +
String.valueOf(getExecResult().getExitValue())
);
}
}
});
}
}
}
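Usage is unchanged by the Groovy-to-Java port; a hypothetical task in any build script still looks like this, with output swallowed on success and replayed at error level only on failure:

    // hypothetical build-script usage of the task class above
    task gitVersion(type: org.elasticsearch.gradle.LoggedExec) {
        executable = 'git'
        args '--version'
    }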
@@ -1,41 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle

/**
* Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions.
*/
class VersionProperties {
static final Version elasticsearch
static final String lucene
static final Map<String, String> versions = new HashMap<>()
static {
Properties props = new Properties()
InputStream propsStream = VersionProperties.class.getResourceAsStream('/version.properties')
if (propsStream == null) {
throw new RuntimeException('/version.properties resource missing')
}
props.load(propsStream)
elasticsearch = Version.fromString(props.getProperty('elasticsearch'))
lucene = props.getProperty('lucene')
for (String property : props.stringPropertyNames()) {
versions.put(property, props.getProperty(property))
}
}
}
@@ -0,0 +1,50 @@
package org.elasticsearch.gradle;

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
* Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions.
*/
public class VersionProperties {
public static Version getElasticsearch() {
return elasticsearch;
}

public static String getLucene() {
return lucene;
}

public static Map<String, String> getVersions() {
return versions;
}

private static final Version elasticsearch;
private static final String lucene;
private static final Map<String, String> versions = new HashMap<String, String>();
static {
Properties props = getVersionProperties();
elasticsearch = Version.fromString(props.getProperty("elasticsearch"));
lucene = props.getProperty("lucene");
for (String property : props.stringPropertyNames()) {
versions.put(property, props.getProperty(property));
}
}

private static Properties getVersionProperties() {
Properties props = new Properties();
InputStream propsStream = VersionProperties.class.getResourceAsStream("/version.properties");
if (propsStream == null) {
throw new RuntimeException("/version.properties resource missing");
}
try {
props.load(propsStream);
} catch (IOException e) {
throw new RuntimeException(e);
}
return props;
}
}
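A small sketch of how the ported accessor behaves from a Groovy build script, assuming a version.properties on the classpath containing, say, elasticsearch=7.0.0-alpha1 and lucene=7.4.0 (illustrative values):

    // Groovy property syntax maps onto the new Java getters
    println VersionProperties.elasticsearch          // the parsed Version object
    println VersionProperties.lucene                 // "7.4.0"
    println VersionProperties.versions['lucene']     // same value, via the map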
@@ -237,6 +237,18 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
current.println(" - stash_in_path")
current.println(" - stash_path_replace")
current.println(" - warnings")
if (test.testEnv != null) {
switch (test.testEnv) {
case 'basic':
case 'gold':
case 'platinum':
current.println(" - xpack")
break;
default:
throw new InvalidUserDataException('Unsupported testEnv: '
+ test.testEnv)
}
}
}
if (test.skipTest) {
if (test.continued) {
@@ -84,6 +84,7 @@ public class SnippetsTask extends DefaultTask {
Snippet snippet = null
StringBuilder contents = null
List substitutions = null
String testEnv = null
Closure emit = {
snippet.contents = contents.toString()
contents = null
@@ -143,10 +144,14 @@ public class SnippetsTask extends DefaultTask {
}
file.eachLine('UTF-8') { String line, int lineNumber ->
Matcher matcher
matcher = line =~ /\[testenv="([^"]+)"\]\s*/
if (matcher.matches()) {
testEnv = matcher.group(1)
}
if (line ==~ /-{4,}\s*/) { // Four dashes looks like a snippet
if (snippet == null) {
Path path = docs.dir.toPath().relativize(file.toPath())
snippet = new Snippet(path: path, start: lineNumber)
snippet = new Snippet(path: path, start: lineNumber, testEnv: testEnv)
if (lastLanguageLine == lineNumber - 1) {
snippet.language = lastLanguage
}
@@ -297,6 +302,7 @@ public class SnippetsTask extends DefaultTask {
int start
int end = NOT_FINISHED
String contents
String testEnv

Boolean console = null
boolean test = false
@@ -321,6 +327,9 @@ public class SnippetsTask extends DefaultTask {
}
if (test) {
result += '// TEST'
if (testEnv != null) {
result += "[testenv=$testEnv]"
}
if (catchPart) {
result += "[catch: $catchPart]"
}
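A minimal sketch of what the new header match picks up, using a hypothetical docs line:

    def line = '[testenv="platinum"]'
    def matcher = line =~ /\[testenv="([^"]+)"\]\s*/
    assert matcher.matches()             // whole line is the attribute
    assert matcher.group(1) == 'platinum' // captured environment name

The captured name is stashed and attached to the next snippet, which is how RestTestsFromSnippetsTask above knows to emit the xpack skip feature for basic/gold/platinum docs.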
@@ -50,11 +50,11 @@ public class LoggerUsageTask extends LoggedExec {
List files = []
// But only if the source sets that will make them exist
if (project.sourceSets.findByName("main")) {
files.add(project.sourceSets.main.output.classesDir)
files.addAll(project.sourceSets.main.output.classesDirs.getFiles())
dependsOn project.tasks.classes
}
if (project.sourceSets.findByName("test")) {
files.add(project.sourceSets.test.output.classesDir)
files.addAll(project.sourceSets.test.output.classesDirs.getFiles())
dependsOn project.tasks.testClasses
}
/* In an extra twist, it isn't good enough that the source set
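The classesDir-to-classesDirs change here (and in the tasks below) follows Gradle's move to one output directory per language; a sketch of the difference, assuming a standard 'main' source set:

    File single = sourceSets.main.output.classesDir             // deprecated: one merged directory
    FileCollection perLanguage = sourceSets.main.output.classesDirs
    // e.g. build/classes/java/main and build/classes/groovy/main
    perLanguage.files.each { println it }

Anything that assumed a single directory (filesets, -cp arguments, input declarations) now has to iterate the collection instead.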
@@ -1,126 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.gradle.precommit

import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.artifacts.Dependency
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.OutputFile
/**
* Runs NamingConventionsCheck on a classpath/directory combo to verify that
* tests are named according to our conventions so they'll be picked up by
* gradle. Read the Javadoc for NamingConventionsCheck to learn more.
*/
public class NamingConventionsTask extends LoggedExec {
/**
* We use a simple "marker" file that we touch when the task succeeds
* as the task output. This is compared against the modified time of the
* inputs (ie the jars/class files).
*/
@OutputFile
File successMarker = new File(project.buildDir, "markers/${this.name}")

/**
* Should we skip the integ tests in disguise tests? Defaults to true because only core names its
* integ tests correctly.
*/
@Input
boolean skipIntegTestInDisguise = false

/**
* Superclass for all tests.
*/
@Input
String testClass = 'org.apache.lucene.util.LuceneTestCase'

/**
* Superclass for all integration tests.
*/
@Input
String integTestClass = 'org.elasticsearch.test.ESIntegTestCase'

/**
* Should the test also check the main classpath for test classes instead of
* doing the usual checks to the test classpath.
*/
@Input
boolean checkForTestsInMain = false;

public NamingConventionsTask() {
// Extra classpath contains the actual test
if (false == project.configurations.names.contains('namingConventions')) {
project.configurations.create('namingConventions')
Dependency buildToolsDep = project.dependencies.add('namingConventions',
"org.elasticsearch.gradle:build-tools:${VersionProperties.elasticsearch}")
buildToolsDep.transitive = false // We don't need gradle in the classpath. It conflicts.
}
FileCollection classpath = project.files(project.configurations.namingConventions,
project.sourceSets.test.compileClasspath,
project.sourceSets.test.output)
dependsOn(classpath)
inputs.files(classpath)
description = "Tests that test classes aren't misnamed or misplaced"
executable = new File(project.runtimeJavaHome, 'bin/java')
if (false == checkForTestsInMain) {
/* This task is created by default for all subprojects with this
* setting and there is no point in running it if the files don't
* exist. */
onlyIf { project.sourceSets.test.output.classesDir.exists() }
}

/*
* We build the arguments in a funny afterEvaluate/doFirst closure so that we can wait for the classpath to be
* ready for us. Strangely neither one on their own are good enough.
*/
project.afterEvaluate {
doFirst {
args('-Djna.nosys=true')
args('-cp', classpath.asPath, 'org.elasticsearch.test.NamingConventionsCheck')
args('--test-class', testClass)
if (skipIntegTestInDisguise) {
args('--skip-integ-tests-in-disguise')
} else {
args('--integ-test-class', integTestClass)
}
/*
* The test framework has classes that fail the checks to validate that the checks fail properly.
* Since these would cause the build to fail we have to ignore them with this parameter. The
* process of ignoring them lets us validate that they were found so this ignore parameter acts
* as the test for the NamingConventionsCheck.
*/
if (':build-tools'.equals(project.path)) {
args('--self-test')
}
if (checkForTestsInMain) {
args('--main')
args('--')
args(project.sourceSets.main.output.classesDir.absolutePath)
} else {
args('--')
args(project.sourceSets.test.output.classesDir.absolutePath)
}
}
}
doLast { successMarker.setText("", 'UTF-8') }
}
}
@@ -0,0 +1,185 @@
package org.elasticsearch.gradle.precommit;

import groovy.lang.Closure;
import org.codehaus.groovy.runtime.ResourceGroovyMethods;
import org.elasticsearch.gradle.LoggedExec;
import org.elasticsearch.test.NamingConventionsCheck;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.file.FileCollection;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.plugins.JavaPluginConvention;
import org.gradle.api.tasks.AbstractExecTask;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.OutputFile;
import org.gradle.api.tasks.SourceSetContainer;

import java.io.File;
import java.io.IOException;
import java.util.Objects;

/**
* Runs NamingConventionsCheck on a classpath/directory combo to verify that
* tests are named according to our conventions so they'll be picked up by
* gradle. Read the Javadoc for NamingConventionsCheck to learn more.
*/
public class NamingConventionsTask extends LoggedExec {
public NamingConventionsTask() {
setDescription("Tests that test classes aren't misnamed or misplaced");
final Project project = getProject();

SourceSetContainer sourceSets = getJavaSourceSets();
final FileCollection classpath = project.files(
// This works because the class only depends on one class from junit that will be available from the
// tests compile classpath. It's the most straight forward way of telling Java where to find the main
// class.
NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation().getPath(),
// the tests to be loaded
checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(),
sourceSets.getByName("test").getCompileClasspath(),
sourceSets.getByName("test").getOutput()
);
dependsOn(project.getTasks().matching(it -> "testCompileClasspath".equals(it.getName())));
getInputs().files(classpath);

setExecutable(new File(
Objects.requireNonNull(
project.getExtensions().getByType(ExtraPropertiesExtension.class).get("runtimeJavaHome")
).toString(),
"bin/java")
);

if (checkForTestsInMain == false) {
/* This task is created by default for all subprojects with this
* setting and there is no point in running it if the files don't
* exist. */
onlyIf((unused) -> getExistingClassesDirs().isEmpty() == false);
}

/*
* We build the arguments in a funny afterEvaluate/doFirst closure so that we can wait for the classpath to be
* ready for us. Strangely neither one on their own are good enough.
*/
project.afterEvaluate(new Closure<Task>(this, this) {
public Task doCall(Project it) {
return doFirst(new Closure<AbstractExecTask>(NamingConventionsTask.this, NamingConventionsTask.this) {
public AbstractExecTask doCall(Task it) {
args("-Djna.nosys=true");
args("-cp", classpath.getAsPath(), "org.elasticsearch.test.NamingConventionsCheck");
args("--test-class", getTestClass());
if (skipIntegTestInDisguise) {
args("--skip-integ-tests-in-disguise");
} else {
args("--integ-test-class", getIntegTestClass());
}
if (getCheckForTestsInMain()) {
args("--main");
args("--");
} else {
args("--");
}
return args(getExistingClassesDirs().getAsPath());
}
});
}
});
doLast(new Closure<Object>(this, this) {
public void doCall(Task it) {
try {
ResourceGroovyMethods.setText(getSuccessMarker(), "", "UTF-8");
} catch (IOException e) {
throw new GradleException("io exception", e);
}
}
});
}

private SourceSetContainer getJavaSourceSets() {
return getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets();
}

public FileCollection getExistingClassesDirs() {
FileCollection classesDirs = getJavaSourceSets().getByName(checkForTestsInMain ? "main" : "test")
.getOutput().getClassesDirs();
return classesDirs.filter(it -> it.exists());
}

public File getSuccessMarker() {
return successMarker;
}

public void setSuccessMarker(File successMarker) {
this.successMarker = successMarker;
}

public boolean getSkipIntegTestInDisguise() {
return skipIntegTestInDisguise;
}

public boolean isSkipIntegTestInDisguise() {
return skipIntegTestInDisguise;
}

public void setSkipIntegTestInDisguise(boolean skipIntegTestInDisguise) {
this.skipIntegTestInDisguise = skipIntegTestInDisguise;
}

public String getTestClass() {
return testClass;
}

public void setTestClass(String testClass) {
this.testClass = testClass;
}

public String getIntegTestClass() {
return integTestClass;
}

public void setIntegTestClass(String integTestClass) {
this.integTestClass = integTestClass;
}

public boolean getCheckForTestsInMain() {
return checkForTestsInMain;
}

public boolean isCheckForTestsInMain() {
return checkForTestsInMain;
}

public void setCheckForTestsInMain(boolean checkForTestsInMain) {
this.checkForTestsInMain = checkForTestsInMain;
}

/**
* We use a simple "marker" file that we touch when the task succeeds
* as the task output. This is compared against the modified time of the
* inputs (ie the jars/class files).
*/
@OutputFile
private File successMarker = new File(getProject().getBuildDir(), "markers/" + this.getName());
/**
* Should we skip the integ tests in disguise tests? Defaults to true because only core names its
* integ tests correctly.
*/
@Input
private boolean skipIntegTestInDisguise = false;
/**
* Superclass for all tests.
*/
@Input
private String testClass = "org.apache.lucene.util.LuceneTestCase";
/**
* Superclass for all integration tests.
*/
@Input
private String integTestClass = "org.elasticsearch.test.ESIntegTestCase";
/**
* Should the test also check the main classpath for test classes instead of
* doing the usual checks to the test classpath.
*/
@Input
private boolean checkForTestsInMain = false;
}
@@ -61,7 +61,7 @@ public class RestIntegTestTask extends DefaultTask {
clusterInit = project.tasks.create(name: "${name}Cluster#init", dependsOn: project.testClasses)
runner.dependsOn(clusterInit)
runner.classpath = project.sourceSets.test.runtimeClasspath
runner.testClassesDir = project.sourceSets.test.output.classesDir
runner.testClassesDirs = project.sourceSets.test.output.classesDirs
clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project)

// start with the common test configuration
@@ -47,7 +47,7 @@ public class StandaloneTestPlugin implements Plugin<Project> {
test.configure(BuildPlugin.commonTestConfig(project))
BuildPlugin.configureCompile(project)
test.classpath = project.sourceSets.test.runtimeClasspath
test.testClassesDir = project.sourceSets.test.output.classesDir
test.testClassesDirs = project.sourceSets.test.output.classesDirs
test.mustRunAfter(project.precommit)
project.check.dependsOn(test)
@@ -22,14 +22,9 @@ import org.apache.commons.io.output.TeeOutputStream
import org.elasticsearch.gradle.LoggedExec
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.Optional
import org.gradle.api.tasks.TaskAction
import org.gradle.internal.logging.progress.ProgressLoggerFactory

import javax.inject.Inject
import java.util.concurrent.CountDownLatch
import java.util.concurrent.locks.Lock
import java.util.concurrent.locks.ReadWriteLock
import java.util.concurrent.locks.ReentrantLock

/**
* Runs a vagrant command. Pretty much like Exec task but with a nicer output
@@ -19,6 +19,7 @@

package org.elasticsearch.test;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Modifier;
import java.nio.file.FileVisitResult;
@@ -30,6 +31,7 @@ import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;

/**
* Checks that all tests in a directory are named according to our naming conventions. This is important because tests that do not follow
@@ -37,19 +39,13 @@ import java.util.Set;
* a class with a main method so gradle can call it for each project. This has the advantage of allowing gradle to calculate when it is
* {@code UP-TO-DATE} so it can be skipped if the compiled classes haven't changed. This is useful on large modules for which checking all
* the modules can be slow.
*
* Annoyingly, this cannot be tested using standard unit tests because to do so you'd have to declare classes that violate the rules. That
* would cause the test to fail which would prevent the build from passing. So we have to make a mechanism for removing those test classes. Now
* that we have such a mechanism it isn't much work to fail the process if we don't detect the offending classes. Thus, the funky
* {@code --self-test} that is only run in the test:framework project.
*/
public class NamingConventionsCheck {
public static void main(String[] args) throws IOException {
Class<?> testClass = null;
Class<?> integTestClass = null;
Path rootPath = null;
String rootPathList = null;
boolean skipIntegTestsInDisguise = false;
boolean selfTest = false;
boolean checkMainClasses = false;
for (int i = 0; i < args.length; i++) {
String arg = args[i];
@@ -63,14 +59,11 @@ public class NamingConventionsCheck {
case "--skip-integ-tests-in-disguise":
skipIntegTestsInDisguise = true;
break;
case "--self-test":
selfTest = true;
break;
case "--main":
checkMainClasses = true;
break;
case "--":
rootPath = Paths.get(args[++i]);
rootPathList = args[++i];
break;
default:
fail("unsupported argument '" + arg + "'");
@@ -78,44 +71,49 @@ public class NamingConventionsCheck {
}

NamingConventionsCheck check = new NamingConventionsCheck(testClass, integTestClass);
if (checkMainClasses) {
check.checkMain(rootPath);
} else {
check.checkTests(rootPath, skipIntegTestsInDisguise);
}

if (selfTest) {
for (String rootDir : rootPathList.split(Pattern.quote(File.pathSeparator))) {
Path rootPath = Paths.get(rootDir);
if (checkMainClasses) {
assertViolation(NamingConventionsCheckInMainTests.class.getName(), check.testsInMain);
assertViolation(NamingConventionsCheckInMainIT.class.getName(), check.testsInMain);
check.checkMain(rootPath);
} else {
assertViolation("WrongName", check.missingSuffix);
assertViolation("WrongNameTheSecond", check.missingSuffix);
assertViolation("DummyAbstractTests", check.notRunnable);
assertViolation("DummyInterfaceTests", check.notRunnable);
assertViolation("InnerTests", check.innerClasses);
assertViolation("NotImplementingTests", check.notImplementing);
assertViolation("PlainUnit", check.pureUnitTest);
check.checkTests(rootPath, skipIntegTestsInDisguise);
}
}

// Now we should have no violations
assertNoViolations(
int exitCode = 0;
exitCode += countAndPrintViolations(
"Not all subclasses of " + check.testClass.getSimpleName()
+ " match the naming convention. Concrete classes must end with [Tests]",
check.missingSuffix);
assertNoViolations("Classes ending with [Tests] are abstract or interfaces", check.notRunnable);
assertNoViolations("Found inner classes that are tests, which are excluded from the test runner", check.innerClasses);
assertNoViolations("Pure Unit-Test found must subclass [" + check.testClass.getSimpleName() + "]", check.pureUnitTest);
assertNoViolations("Classes ending with [Tests] must subclass [" + check.testClass.getSimpleName() + "]", check.notImplementing);
assertNoViolations(
"Classes ending with [Tests] or [IT] or extending [" + check.testClass.getSimpleName() + "] must be in src/test/java",
check.testsInMain);
check.missingSuffix);
exitCode += countAndPrintViolations(
"Classes ending with [Tests] are abstract or interfaces",
check.notRunnable
);
exitCode += countAndPrintViolations(
"Found inner classes that are tests, which are excluded from the test runner",
check.innerClasses
);
exitCode += countAndPrintViolations(
"Pure Unit-Test found must subclass [" + check.testClass.getSimpleName() + "]",
check.pureUnitTest
);
exitCode += countAndPrintViolations(
"Classes ending with [Tests] must subclass [" + check.testClass.getSimpleName() + "]",
check.notImplementing
);
exitCode += countAndPrintViolations(
"Classes ending with [Tests] or [IT] or extending [" +
check.testClass.getSimpleName() + "] must be in src/test/java",
check.testsInMain
);
if (skipIntegTestsInDisguise == false) {
assertNoViolations(
"Subclasses of " + check.integTestClass.getSimpleName() + " should end with IT as they are integration tests",
check.integTestsInDisguise);
exitCode += countAndPrintViolations("Subclasses of " + check.integTestClass.getSimpleName() +
" should end with IT as they are integration tests",
check.integTestsInDisguise
);
}
System.exit(exitCode);
}

private final Set<Class<?>> notImplementing = new HashSet<>();
@@ -138,7 +136,9 @@ public class NamingConventionsCheck {
Files.walkFileTree(rootPath, new TestClassVisitor() {
@Override
protected void visitTestClass(Class<?> clazz) {
if (skipTestsInDisguised == false && integTestClass.isAssignableFrom(clazz)) {
if (skipTestsInDisguised == false &&
integTestClass.isAssignableFrom(clazz) &&
clazz != integTestClass) {
integTestsInDisguise.add(clazz);
}
if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
@@ -196,18 +196,15 @@ public class NamingConventionsCheck {

}

/**
* Fail the process if there are any violations in the set. Named to look like a junit assertion even though it isn't because it is
* similar enough.
*/
private static void assertNoViolations(String message, Set<Class<?>> set) {
private static int countAndPrintViolations(String message, Set<Class<?>> set) {
if (false == set.isEmpty()) {
System.err.println(message + ":");
for (Class<?> bad : set) {
System.err.println(" * " + bad.getName());
}
System.exit(1);
return 1;
}
return 0;
}

/**
@@ -254,15 +251,16 @@ public class NamingConventionsCheck {
* Visit classes named like a test.
*/
protected abstract void visitTestClass(Class<?> clazz);

/**
* Visit classes named like an integration test.
*/
protected abstract void visitIntegrationTestClass(Class<?> clazz);

/**
* Visit classes not named like a test at all.
*/
protected abstract void visitOtherClass(Class<?> clazz);

@Override
public final FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
// First we visit the root directory
@@ -310,5 +308,7 @@ public class NamingConventionsCheck {
public final FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
throw exc;
}

}

}
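Because a source set can now produce several class directories, the checker's trailing argument becomes a joined path list; a sketch of the round trip with hypothetical directories:

    String rootPathList = ['build/classes/java/test', 'build/classes/groovy/test']
            .join(File.pathSeparator)
    // same split the checker performs on its "--" argument
    rootPathList.split(java.util.regex.Pattern.quote(File.pathSeparator)).each { dir ->
        println "would walk: $dir"
    }

Pattern.quote matters because on Windows the separator is ';' (harmless) but the idiom keeps the split literal regardless of what the platform separator is.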
@@ -1,6 +1,9 @@
package org.elasticsearch.gradle

class VersionCollectionTest extends GroovyTestCase {
import org.elasticsearch.gradle.test.GradleUnitTestCase
import org.junit.Test

class VersionCollectionTests extends GradleUnitTestCase {

String formatVersion(String version) {
return " public static final Version V_${version.replaceAll("\\.", "_")} "
@@ -16,6 +19,7 @@ class VersionCollectionTest extends GroovyTestCase {
* branched from Major-1.x At the time of this writing 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior
* from 7.0 perspective, or master at the time of this writing.
*/
@Test
void testAgainstMajorUnreleasedWithExistingStagedMinorRelease() {
VersionCollection vc = new VersionCollection(allVersions)
assertNotNull(vc)
@@ -51,6 +55,7 @@ class VersionCollectionTest extends GroovyTestCase {
* unreleased minor is released. At the time of this writing 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test
* simulates the behavior from 7.0 perspective, or master at the time of this writing.
*/
@Test
void testAgainstMajorUnreleasedWithoutStagedMinorRelease() {
List localVersion = allVersions.clone()
localVersion.add(formatVersion('6.2.1')) // release 6.2
@@ -89,6 +94,7 @@ class VersionCollectionTest extends GroovyTestCase {
* branched from Major.x At the time of this writing 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior
* from 6.3 perspective.
*/
@Test
void testAgainstMinorReleasedBranch() {
List localVersion = allVersions.clone()
localVersion.removeAll { it.toString().contains('7_0_0')} // remove all the 7.x so that the actual version is 6.3 (6.x)
@@ -126,6 +132,7 @@ class VersionCollectionTest extends GroovyTestCase {
* unreleased minor is released. At the time of this writing 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test
* simulates the behavior from 6.3 perspective.
*/
@Test
void testAgainstMinorReleasedBranchNoStagedMinor() {
List localVersion = allVersions.clone()
// remove all the 7.x and add a 6.2.1 which means 6.2 was released
@@ -162,6 +169,7 @@ class VersionCollectionTest extends GroovyTestCase {
* This validates the logic of being on a released minor branch. At the time of writing, 6.2 is unreleased, so this is equivalent of being
* on 6.1.
*/
@Test
void testAgainstOldMinor() {

List localVersion = allVersions.clone()
@@ -195,6 +203,7 @@ class VersionCollectionTest extends GroovyTestCase {
* This validates the lower bound of wire compat, which is 5.0. It also validates that the span of 2.x to 5.x if it is decided to port
* this fix all the way to the maint 5.6 release.
*/
@Test
void testFloorOfWireCompatVersions() {
List localVersion = [formatVersion('2.0.0'), formatVersion('2.0.1'), formatVersion('2.1.0'), formatVersion('2.1.1'),
formatVersion('5.0.0'), formatVersion('5.0.1'), formatVersion('5.1.0'), formatVersion('5.1.1'),
@@ -19,31 +19,41 @@

package org.elasticsearch.gradle.doc

import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.shouldAddShardFailureCheck
import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote
import org.elasticsearch.gradle.test.GradleUnitTestCase
import org.gradle.api.InvalidUserDataException
import org.junit.Rule
import org.junit.rules.ExpectedException

import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote
import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.shouldAddShardFailureCheck

class RestTestFromSnippetsTaskTests extends GradleUnitTestCase {

@Rule
public ExpectedException expectedEx = ExpectedException.none()

class RestTestFromSnippetsTaskTest extends GroovyTestCase {
void testInvalidBlockQuote() {
String input = "\"foo\": \"\"\"bar\"";
String message = shouldFail({ replaceBlockQuote(input) });
assertEquals("Invalid block quote starting at 7 in:\n$input", message);
String input = "\"foo\": \"\"\"bar\""
expectedEx.expect(InvalidUserDataException.class)
expectedEx.expectMessage("Invalid block quote starting at 7 in:\n$input")
replaceBlockQuote(input)
}

void testSimpleBlockQuote() {
assertEquals("\"foo\": \"bort baz\"",
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\""));
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\""))
}

void testMultipleBlockQuotes() {
assertEquals("\"foo\": \"bort baz\", \"bar\": \"other\"",
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\""));
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\""))
}

void testEscapingInBlockQuote() {
assertEquals("\"foo\": \"bort\\\" baz\"",
replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\""));
replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\""))
assertEquals("\"foo\": \"bort\\n baz\"",
replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\""));
replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\""))
}

void testIsDocWriteRequest() {
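The shouldFail helper disappears together with GroovyTestCase, so expected failures go through JUnit's ExpectedException rule instead; the general pattern, sketched with a hypothetical test method:

    @Rule
    public ExpectedException thrown = ExpectedException.none()

    @Test
    void rejectsBadInput() {
        thrown.expect(InvalidUserDataException)          // type must match
        thrown.expectMessage('Invalid block quote')      // substring of the message
        replaceBlockQuote('"foo": """bar"')              // throws before returning
    }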
@@ -0,0 +1,75 @@
package org.elasticsearch.gradle.precommit;

import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.gradle.testkit.runner.TaskOutcome;

import java.util.Arrays;

public class NamingConventionsTaskIT extends GradleIntegrationTestCase {

public void testPluginCanBeApplied() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("namingConventionsSelfTest"))
.withArguments("hello", "-s", "-PcheckForTestsInMain=false")
.withPluginClasspath()
.build();

assertEquals(TaskOutcome.SUCCESS, result.task(":hello").getOutcome());
String output = result.getOutput();
assertTrue(output, output.contains("build plugin can be applied"));
}

public void testNameCheckFailsAsItShould() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("namingConventionsSelfTest"))
.withArguments("namingConventions", "-s", "-PcheckForTestsInMain=false")
.withPluginClasspath()
.buildAndFail();

assertNotNull("task did not run", result.task(":namingConventions"));
assertEquals(TaskOutcome.FAILED, result.task(":namingConventions").getOutcome());
String output = result.getOutput();
for (String line : Arrays.asList(
"Found inner classes that are tests, which are excluded from the test runner:",
"* org.elasticsearch.test.NamingConventionsCheckInMainIT$InternalInvalidTests",
"Classes ending with [Tests] must subclass [UnitTestCase]:",
"* org.elasticsearch.test.NamingConventionsCheckInMainTests",
"* org.elasticsearch.test.NamingConventionsCheckInMainIT",
"Not all subclasses of UnitTestCase match the naming convention. Concrete classes must end with [Tests]:",
"* org.elasticsearch.test.WrongName")) {
assertTrue(
"expected: '" + line + "' but it was not found in the output:\n" + output,
output.contains(line)
);
}
}

public void testNameCheckFailsAsItShouldWithMain() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("namingConventionsSelfTest"))
.withArguments("namingConventions", "-s", "-PcheckForTestsInMain=true")
.withPluginClasspath()
.buildAndFail();

assertNotNull("task did not run", result.task(":namingConventions"));
assertEquals(TaskOutcome.FAILED, result.task(":namingConventions").getOutcome());

String output = result.getOutput();
for (String line : Arrays.asList(
"Classes ending with [Tests] or [IT] or extending [UnitTestCase] must be in src/test/java:",
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$DummyInterfaceTests",
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$DummyAbstractTests",
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$InnerTests",
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$NotImplementingTests",
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$WrongNameTheSecond",
"* org.elasticsearch.test.NamingConventionsCheckBadClasses$WrongName")) {
assertTrue(
"expected: '" + line + "' but it was not found in the output:\n" + output,
output.contains(line)
);
}
}

}
@@ -16,28 +16,18 @@
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.repositories.s3;

import java.io.Closeable;
import java.util.Map;

interface AwsS3Service extends Closeable {

    /**
     * Creates then caches an {@code AmazonS3} client using the current client
     * settings. Returns an {@code AmazonS3Reference} wrapper which has to be
     * released as soon as it is not needed anymore.
     */
    AmazonS3Reference client(String clientName);

    /**
     * Updates settings for building clients and clears the client cache. Future
     * client requests will use the new settings to lazily build new clients.
     *
     * @param clientsSettings the new refreshed settings
     * @return the old stale settings
     */
    Map<String, S3ClientSettings> refreshAndClearCache(Map<String, S3ClientSettings> clientsSettings);
}

package org.elasticsearch.gradle.test;

import com.carrotsearch.randomizedtesting.JUnit4MethodProvider;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
import org.junit.Assert;
import org.junit.runner.RunWith;

@RunWith(RandomizedRunner.class)
@TestMethodProviders({
    JUnit4MethodProvider.class,
    JUnit3MethodProvider.class
})
public abstract class BaseTestCase extends Assert {
}
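Since the contract above requires callers to release the returned reference, here is a minimal caller-side sketch; it assumes AmazonS3Reference is closeable, and the `s3Service` variable, the "default" client name, and the bucket name are all illustrative:

// Hypothetical caller; not part of this commit.
try (AmazonS3Reference clientReference = s3Service.client("default")) {
    // Use the wrapped AmazonS3 client only while the reference is held.
    clientReference.client().doesBucketExist("my-bucket");
} // closing releases the reference, per the interface contract above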
@@ -0,0 +1,16 @@
package org.elasticsearch.gradle.test;

import java.io.File;

public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {

    protected File getProjectDir(String name) {
        File root = new File("src/testKit/");
        if (root.exists() == false) {
            throw new RuntimeException("Could not find resources dir for integration tests. " +
                "Note that these tests can only be run by Gradle and are not currently supported by the IDE");
        }
        return new File(root, name);
    }

}
@@ -0,0 +1,14 @@
package org.elasticsearch.gradle.test;

import com.carrotsearch.randomizedtesting.JUnit4MethodProvider;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
import org.junit.runner.RunWith;

@RunWith(RandomizedRunner.class)
@TestMethodProviders({
    JUnit4MethodProvider.class,
    JUnit3MethodProvider.class
})
public abstract class GradleUnitTestCase extends BaseTestCase {
}
@@ -0,0 +1,55 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.test;

import com.carrotsearch.randomizedtesting.ClassModel;
import com.carrotsearch.randomizedtesting.ClassModel.MethodModel;
import com.carrotsearch.randomizedtesting.TestMethodProvider;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;

/**
 * Backwards compatible test* method provider (public, non-static).
 *
 * Copy of org.apache.lucene.util.LuceneJUnit3MethodProvider to avoid a dependency between build and test fw.
 */
public final class JUnit3MethodProvider implements TestMethodProvider {
    @Override
    public Collection<Method> getTestMethods(Class<?> suiteClass, ClassModel classModel) {
        Map<Method, MethodModel> methods = classModel.getMethods();
        ArrayList<Method> result = new ArrayList<>();
        for (MethodModel mm : methods.values()) {
            // Skip any methods that have overrides / shadows.
            if (mm.getDown() != null) continue;

            Method m = mm.element;
            if (m.getName().startsWith("test") &&
                Modifier.isPublic(m.getModifiers()) &&
                !Modifier.isStatic(m.getModifiers()) &&
                m.getParameterTypes().length == 0) {
                result.add(m);
            }
        }
        return result;
    }
}
@@ -0,0 +1,30 @@
plugins {
  id 'java'
  id 'elasticsearch.build'
}

dependencyLicenses.enabled = false
dependenciesInfo.enabled = false
forbiddenApisMain.enabled = false
forbiddenApisTest.enabled = false
jarHell.enabled = false
thirdPartyAudit.enabled = false

ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")

task hello {
  doFirst {
    println "build plugin can be applied"
  }
}

dependencies {
  compile "junit:junit:${versions.junit}"
}

namingConventions {
  checkForTestsInMain = project.property("checkForTestsInMain") == "true"
  testClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$UnitTestCase'
  integTestClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$IntegTestCase'
}
@@ -23,4 +23,9 @@ package org.elasticsearch.test;
 * This class should fail the naming conventions self test.
 */
public class NamingConventionsCheckInMainIT {

    public static class InternalInvalidTests extends NamingConventionsCheckBadClasses.UnitTestCase {

    }

}
@@ -0,0 +1,26 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.test;

/**
 * This class should fail the naming conventions self test.
 */
public class WrongName extends NamingConventionsCheckBadClasses.UnitTestCase {
}
@@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;

@@ -65,14 +67,15 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.GetPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.SimulatePipelineRequest;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.SearchRequest;

@@ -100,6 +103,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateRequest;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.tasks.TaskId;
@@ -601,6 +605,21 @@ final class RequestConverters {
        request.setEntity(createEntity(searchTemplateRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest) throws IOException {
        Request request = new Request(HttpPost.METHOD_NAME, "/_msearch/template");

        Params params = new Params(request);
        params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
        if (multiSearchTemplateRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) {
            params.putParam("max_concurrent_searches", Integer.toString(multiSearchTemplateRequest.maxConcurrentSearchRequests()));
        }

        XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent();
        byte[] source = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, xContent);
        request.setEntity(new ByteArrayEntity(source, createContentType(xContent.type())));
        return request;
    }

    static Request existsAlias(GetAliasesRequest getAliasesRequest) {
        if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) &&
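For orientation, writeMultiLineFormat renders the request as newline-delimited JSON, one metadata line plus one template line per search; a sketch of a single-entry payload, where the index, field, and parameter values are made up:

{"index":["index1"]}
{"source":{"query":{"match":{"{{field}}":"{{value}}"}}},"params":{"field":"name","value":"bob"}}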
@@ -618,6 +637,19 @@ final class RequestConverters {
        return request;
    }

    static Request explain(ExplainRequest explainRequest) throws IOException {
        Request request = new Request(HttpGet.METHOD_NAME,
            endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain"));

        Params params = new Params(request);
        params.withStoredFields(explainRequest.storedFields());
        params.withFetchSourceContext(explainRequest.fetchSourceContext());
        params.withRouting(explainRequest.routing());
        params.withPreference(explainRequest.preference());
        request.setEntity(createEntity(explainRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest) {
        Request request = new Request(HttpGet.METHOD_NAME, endpoint(fieldCapabilitiesRequest.indices(), "_field_caps"));
@@ -866,6 +898,39 @@ final class RequestConverters {
        return request;
    }

    static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
        String endpoint = new EndpointBuilder().addPathPart("_snapshot")
            .addPathPart(createSnapshotRequest.repository())
            .addPathPart(createSnapshotRequest.snapshot())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        Params params = new Params(request);
        params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout());
        params.withWaitForCompletion(createSnapshotRequest.waitForCompletion());
        request.setEntity(createEntity(createSnapshotRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) {
        EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(getSnapshotsRequest.repository());
        String endpoint;
        if (getSnapshotsRequest.snapshots().length == 0) {
            endpoint = endpointBuilder.addPathPart("_all").build();
        } else {
            endpoint = endpointBuilder.addCommaSeparatedPathParts(getSnapshotsRequest.snapshots()).build();
        }

        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout());
        parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable()));
        parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose()));

        return request;
    }

    static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(deleteSnapshotRequest.repository())
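In short, getSnapshots falls back to the `_all` path part when no snapshot names are set; with an illustrative repository name the two branches yield (a sketch, not captured output):

GET /_snapshot/my_repo/_all            (snapshots() is empty)
GET /_snapshot/my_repo/snap_1,snap_2   (explicit snapshot names)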
@@ -34,6 +34,8 @@ import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.action.get.GetRequest;

@@ -66,6 +68,8 @@ import org.elasticsearch.index.rankeval.RankEvalResponse;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse;
import org.elasticsearch.script.mustache.SearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateResponse;
import org.elasticsearch.search.aggregations.Aggregation;
@@ -614,6 +618,42 @@ public class RestHighLevelClient implements Closeable {
            SearchTemplateResponse::fromXContent, listener, emptySet());
    }

    /**
     * Executes a request using the Explain API.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-explain.html">Explain API on elastic.co</a>
     * @param explainRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public final ExplainResponse explain(ExplainRequest explainRequest, RequestOptions options) throws IOException {
        return performRequest(explainRequest, RequestConverters::explain, options,
            response -> {
                CheckedFunction<XContentParser, ExplainResponse, IOException> entityParser =
                    parser -> ExplainResponse.fromXContent(parser, convertExistsResponse(response));
                return parseEntity(response.getEntity(), entityParser);
            },
            singleton(404));
    }

    /**
     * Asynchronously executes a request using the Explain API.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-explain.html">Explain API on elastic.co</a>
     * @param explainRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public final void explainAsync(ExplainRequest explainRequest, RequestOptions options, ActionListener<ExplainResponse> listener) {
        performRequestAsync(explainRequest, RequestConverters::explain, options,
            response -> {
                CheckedFunction<XContentParser, ExplainResponse, IOException> entityParser =
                    parser -> ExplainResponse.fromXContent(parser, convertExistsResponse(response));
                return parseEntity(response.getEntity(), entityParser);
            },
            listener, singleton(404));
    }

    /**
     * Executes a request using the Ranking Evaluation API.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-rank-eval.html">Ranking Evaluation API
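A minimal usage sketch of the Explain API shown above, assuming an already-built RestHighLevelClient named `client`; the index, type, and id values are illustrative and the fragment is meant to sit inside a method body:

ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1");
explainRequest.query(QueryBuilders.termQuery("field", "value1"));

// Synchronous variant; explainAsync takes the same arguments plus an ActionListener<ExplainResponse>.
ExplainResponse explainResponse = client.explain(explainRequest, RequestOptions.DEFAULT);
if (explainResponse.isMatch()) {
    System.out.println(explainResponse.getExplanation());
}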
@@ -628,6 +668,32 @@ public class RestHighLevelClient implements Closeable {
            emptySet());
    }

    /**
     * Executes a request using the Multi Search Template API.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/multi-search-template.html">Multi Search Template API
     * on elastic.co</a>.
     */
    public final MultiSearchTemplateResponse multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest,
                                                                 RequestOptions options) throws IOException {
        return performRequestAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate,
            options, MultiSearchTemplateResponse::fromXContext, emptySet());
    }

    /**
     * Asynchronously executes a request using the Multi Search Template API.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/multi-search-template.html">Multi Search Template API
     * on elastic.co</a>.
     */
    public final void multiSearchTemplateAsync(MultiSearchTemplateRequest multiSearchTemplateRequest,
                                               RequestOptions options,
                                               ActionListener<MultiSearchTemplateResponse> listener) {
        performRequestAsyncAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate,
            options, MultiSearchTemplateResponse::fromXContext, listener, emptySet());
    }

    /**
     * Asynchronously executes a request using the Ranking Evaluation API.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-rank-eval.html">Ranking Evaluation API
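Similarly, a caller-side sketch of the Multi Search Template API, assuming the same illustrative `client`; the field and value parameters are made up:

MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();

SearchTemplateRequest templateRequest = new SearchTemplateRequest(new SearchRequest("index1"));
templateRequest.setScriptType(ScriptType.INLINE);
templateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}");
Map<String, Object> params = new HashMap<>();
params.put("field", "user");
params.put("value", "kimchy");
templateRequest.setScriptParams(params);
multiRequest.add(templateRequest);

MultiSearchTemplateResponse templateResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT);
for (MultiSearchTemplateResponse.Item item : templateResponse.getResponses()) {
    if (item.isFailure()) {
        // A single malformed template fails only its own slot, not the whole request.
        System.err.println(item.getFailureMessage());
    }
}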
@@ -28,8 +28,12 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;

import java.io.IOException;
@@ -164,6 +168,59 @@ public final class SnapshotClient {
            VerifyRepositoryResponse::fromXContent, listener, emptySet());
    }

    /**
     * Creates a snapshot.
     * <p>
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
     * API on elastic.co</a>
     */
    public CreateSnapshotResponse createSnapshot(CreateSnapshotRequest createSnapshotRequest, RequestOptions options)
        throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
            CreateSnapshotResponse::fromXContent, emptySet());
    }

    /**
     * Asynchronously creates a snapshot.
     * <p>
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
     * API on elastic.co</a>
     */
    public void createSnapshotAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options,
                                    ActionListener<CreateSnapshotResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
            CreateSnapshotResponse::fromXContent, listener, emptySet());
    }

    /**
     * Gets snapshots.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
     * API on elastic.co</a>
     *
     * @param getSnapshotsRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public GetSnapshotsResponse get(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options,
            GetSnapshotsResponse::fromXContent, emptySet());
    }

    /**
     * Asynchronously gets snapshots.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
     * API on elastic.co</a>
     *
     * @param getSnapshotsRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void getAsync(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options, ActionListener<GetSnapshotsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options,
            GetSnapshotsResponse::fromXContent, listener, emptySet());
    }

    /**
     * Deletes a snapshot.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
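A caller-side sketch of the snapshot methods above, assuming an illustrative `client` and an already-registered repository named `my_repository`:

CreateSnapshotRequest createRequest = new CreateSnapshotRequest("my_repository", "my_snapshot");
createRequest.waitForCompletion(true);
CreateSnapshotResponse createResponse = client.snapshot().createSnapshot(createRequest, RequestOptions.DEFAULT);

// With no explicit snapshot names, the request resolves to the repository's _all endpoint.
GetSnapshotsRequest getRequest = new GetSnapshotsRequest("my_repository");
GetSnapshotsResponse getResponse = client.snapshot().get(getRequest, RequestOptions.DEFAULT);
getResponse.getSnapshots().forEach(info -> System.out.println(info.snapshotId().getName()));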
@@ -37,7 +37,9 @@ import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;

@@ -68,6 +70,7 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkShardRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.MultiGetRequest;

@@ -111,6 +114,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.RandomCreateIndexGenerator;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.rankeval.PrecisionAtK;
import org.elasticsearch.index.rankeval.RankEvalRequest;

@@ -120,6 +124,7 @@ import org.elasticsearch.index.rankeval.RestRankEvalAction;
import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateRequest;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
@@ -1369,7 +1374,53 @@ public class RequestConvertersTests extends ESTestCase {
        assertEquals(Collections.emptyMap(), request.getParameters());
        assertToXContentBody(searchTemplateRequest, request.getEntity());
    }

    public void testMultiSearchTemplate() throws Exception {
        final int numSearchRequests = randomIntBetween(1, 10);
        MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();

        for (int i = 0; i < numSearchRequests; i++) {
            // Create a random request.
            String[] indices = randomIndicesNames(0, 5);
            SearchRequest searchRequest = new SearchRequest(indices);

            Map<String, String> expectedParams = new HashMap<>();
            setRandomSearchParams(searchRequest, expectedParams);

            // scroll is not supported in the current msearch or msearchtemplate api, so unset it:
            searchRequest.scroll((Scroll) null);
            // batched reduce size is currently not set-able on a per-request basis as it is a query string parameter only
            searchRequest.setBatchedReduceSize(SearchRequest.DEFAULT_BATCHED_REDUCE_SIZE);

            setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams);

            SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(searchRequest);

            searchTemplateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}");
            searchTemplateRequest.setScriptType(ScriptType.INLINE);
            searchTemplateRequest.setProfile(randomBoolean());

            Map<String, Object> scriptParams = new HashMap<>();
            scriptParams.put("field", "name");
            scriptParams.put("value", randomAlphaOfLengthBetween(2, 5));
            searchTemplateRequest.setScriptParams(scriptParams);

            multiSearchTemplateRequest.add(searchTemplateRequest);
        }

        Request multiRequest = RequestConverters.multiSearchTemplate(multiSearchTemplateRequest);

        assertEquals(HttpPost.METHOD_NAME, multiRequest.getMethod());
        assertEquals("/_msearch/template", multiRequest.getEndpoint());
        List<SearchTemplateRequest> searchRequests = multiSearchTemplateRequest.requests();
        assertEquals(numSearchRequests, searchRequests.size());

        HttpEntity actualEntity = multiRequest.getEntity();
        byte[] expectedBytes = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, XContentType.JSON.xContent());
        assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue());
        assertEquals(new BytesArray(expectedBytes), new BytesArray(EntityUtils.toByteArray(actualEntity)));
    }

    public void testExistsAlias() {
        GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
        String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
@@ -1418,6 +1469,49 @@ public class RequestConvertersTests extends ESTestCase {
        }
    }

    public void testExplain() throws IOException {
        String index = randomAlphaOfLengthBetween(3, 10);
        String type = randomAlphaOfLengthBetween(3, 10);
        String id = randomAlphaOfLengthBetween(3, 10);

        ExplainRequest explainRequest = new ExplainRequest(index, type, id);
        explainRequest.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10)));

        Map<String, String> expectedParams = new HashMap<>();

        if (randomBoolean()) {
            String routing = randomAlphaOfLengthBetween(3, 10);
            explainRequest.routing(routing);
            expectedParams.put("routing", routing);
        }
        if (randomBoolean()) {
            String preference = randomAlphaOfLengthBetween(3, 10);
            explainRequest.preference(preference);
            expectedParams.put("preference", preference);
        }
        if (randomBoolean()) {
            String[] storedFields = generateRandomStringArray(10, 5, false, false);
            String storedFieldsParams = randomFields(storedFields);
            explainRequest.storedFields(storedFields);
            expectedParams.put("stored_fields", storedFieldsParams);
        }
        if (randomBoolean()) {
            randomizeFetchSourceContextParams(explainRequest::fetchSourceContext, expectedParams);
        }

        Request request = RequestConverters.explain(explainRequest);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add(index)
            .add(type)
            .add(id)
            .add("_explain");

        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
        assertEquals(endpoint.toString(), request.getEndpoint());
        assertEquals(expectedParams, request.getParameters());
        assertToXContentBody(explainRequest, request.getEntity());
    }

    public void testFieldCaps() {
        // Create a random request.
        String[] indices = randomIndicesNames(0, 5);
@@ -1943,6 +2037,80 @@ public class RequestConvertersTests extends ESTestCase {
        assertThat(expectedParams, equalTo(request.getParameters()));
    }

    public void testCreateSnapshot() throws IOException {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];
        String snapshot = "snapshot-" + generateRandomStringArray(1, randomInt(10), false, false)[0];
        String endpoint = "/_snapshot/" + repository + "/" + snapshot;

        CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot);
        setRandomMasterTimeout(createSnapshotRequest, expectedParams);
        Boolean waitForCompletion = randomBoolean();
        createSnapshotRequest.waitForCompletion(waitForCompletion);

        if (waitForCompletion) {
            expectedParams.put("wait_for_completion", waitForCompletion.toString());
        }

        Request request = RequestConverters.createSnapshot(createSnapshotRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertToXContentBody(createSnapshotRequest, request.getEntity());
    }

    public void testGetSnapshots() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];
        String snapshot1 = "snapshot1-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);
        String snapshot2 = "snapshot2-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);

        String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s,%s", repository, snapshot1, snapshot2);

        GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest();
        getSnapshotsRequest.repository(repository);
        getSnapshotsRequest.snapshots(Arrays.asList(snapshot1, snapshot2).toArray(new String[0]));
        setRandomMasterTimeout(getSnapshotsRequest, expectedParams);

        boolean ignoreUnavailable = randomBoolean();
        getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
        expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));

        boolean verbose = randomBoolean();
        getSnapshotsRequest.verbose(verbose);
        expectedParams.put("verbose", Boolean.toString(verbose));

        Request request = RequestConverters.getSnapshots(getSnapshotsRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertNull(request.getEntity());
    }

    public void testGetAllSnapshots() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];

        String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/_all", repository);

        GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(repository);
        setRandomMasterTimeout(getSnapshotsRequest, expectedParams);

        boolean ignoreUnavailable = randomBoolean();
        getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
        expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));

        boolean verbose = randomBoolean();
        getSnapshotsRequest.verbose(verbose);
        expectedParams.put("verbose", Boolean.toString(verbose));

        Request request = RequestConverters.getSnapshots(getSnapshotsRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertNull(request.getEntity());
    }

    public void testDeleteSnapshot() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];
@@ -2264,7 +2432,7 @@ public class RequestConvertersTests extends ESTestCase {
            expectedParams.put("preference", searchRequest.preference());
        }
        if (randomBoolean()) {
            searchRequest.searchType(randomFrom(SearchType.CURRENTLY_SUPPORTED));
        }
        expectedParams.put("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT));
        if (randomBoolean()) {
@@ -27,6 +27,8 @@ import org.apache.http.entity.StringEntity;
import org.apache.http.nio.entity.NStringEntity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;

@@ -44,6 +46,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.ScriptQueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.join.aggregations.Children;

@@ -51,6 +54,9 @@ import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse;
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse.Item;
import org.elasticsearch.script.mustache.SearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateResponse;
import org.elasticsearch.search.SearchHit;

@@ -63,6 +69,7 @@ import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats;
import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.Suggest;
@@ -135,7 +142,44 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
        client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/5", Collections.emptyMap(), doc);
        doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
        client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/6", Collections.emptyMap(), doc);
        client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3/_refresh");

        mappings = new StringEntity(
            "{" +
            "  \"mappings\": {" +
            "    \"doc\": {" +
            "      \"properties\": {" +
            "        \"field1\": {" +
            "          \"type\": \"keyword\"," +
            "          \"store\": true" +
            "        }," +
            "        \"field2\": {" +
            "          \"type\": \"keyword\"," +
            "          \"store\": true" +
            "        }" +
            "      }" +
            "    }" +
            "  }" +
            "}",
            ContentType.APPLICATION_JSON);
        client().performRequest(HttpPut.METHOD_NAME, "/index4", Collections.emptyMap(), mappings);
        doc = new StringEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}", ContentType.APPLICATION_JSON);
        client().performRequest(HttpPut.METHOD_NAME, "/index4/doc/1", Collections.emptyMap(), doc);
        StringEntity aliasFilter = new StringEntity(
            "{" +
            "  \"actions\" : [" +
            "    {" +
            "      \"add\" : {" +
            "        \"index\" : \"index4\"," +
            "        \"alias\" : \"alias4\"," +
            "        \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" +
            "      }" +
            "    }" +
            "  ]" +
            "}",
            ContentType.APPLICATION_JSON);
        client().performRequest(HttpPost.METHOD_NAME, "/_aliases", Collections.emptyMap(), aliasFilter);

        client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3,index4/_refresh");
    }

    public void testSearchNoQuery() throws IOException {
@@ -834,6 +878,273 @@ public class SearchIT extends ESRestHighLevelClientTestCase {

        assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON);
    }

    public void testMultiSearchTemplate() throws Exception {
        MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();

        SearchTemplateRequest goodRequest = new SearchTemplateRequest();
        goodRequest.setRequest(new SearchRequest("index"));
        goodRequest.setScriptType(ScriptType.INLINE);
        goodRequest.setScript(
            "{" +
            "  \"query\": {" +
            "    \"match\": {" +
            "      \"num\": {{number}}" +
            "    }" +
            "  }" +
            "}");
        Map<String, Object> scriptParams = new HashMap<>();
        scriptParams.put("number", 10);
        goodRequest.setScriptParams(scriptParams);
        goodRequest.setExplain(true);
        goodRequest.setProfile(true);
        multiSearchTemplateRequest.add(goodRequest);

        SearchTemplateRequest badRequest = new SearchTemplateRequest();
        badRequest.setRequest(new SearchRequest("index"));
        badRequest.setScriptType(ScriptType.INLINE);
        badRequest.setScript("{ NOT VALID JSON {{number}} }");
        scriptParams = new HashMap<>();
        scriptParams.put("number", 10);
        badRequest.setScriptParams(scriptParams);

        multiSearchTemplateRequest.add(badRequest);

        MultiSearchTemplateResponse multiSearchTemplateResponse =
            execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
                highLevelClient()::multiSearchTemplateAsync);

        Item[] responses = multiSearchTemplateResponse.getResponses();

        assertEquals(2, responses.length);

        assertNull(responses[0].getResponse().getSource());
        SearchResponse goodResponse = responses[0].getResponse().getResponse();
        assertNotNull(goodResponse);
        assertThat(responses[0].isFailure(), Matchers.is(false));
        assertEquals(1, goodResponse.getHits().totalHits);
        assertEquals(1, goodResponse.getHits().getHits().length);
        assertThat(goodResponse.getHits().getMaxScore(), greaterThan(0f));
        SearchHit hit = goodResponse.getHits().getHits()[0];
        assertNotNull(hit.getExplanation());
        assertFalse(goodResponse.getProfileResults().isEmpty());

        assertNull(responses[1].getResponse());
        assertThat(responses[1].isFailure(), Matchers.is(true));
        assertNotNull(responses[1].getFailureMessage());
        assertThat(responses[1].getFailureMessage(), containsString("json_parse_exception"));
    }

    public void testMultiSearchTemplateAllBad() throws Exception {
        MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();

        SearchTemplateRequest badRequest1 = new SearchTemplateRequest();
        badRequest1.setRequest(new SearchRequest("index"));
        badRequest1.setScriptType(ScriptType.INLINE);
        badRequest1.setScript(
            "{" +
            "  \"query\": {" +
            "    \"match\": {" +
            "      \"num\": {{number}}" +
            "    }" +
            "  }" +
            "}");
        Map<String, Object> scriptParams = new HashMap<>();
        scriptParams.put("number", "BAD NUMBER");
        badRequest1.setScriptParams(scriptParams);
        multiSearchTemplateRequest.add(badRequest1);

        SearchTemplateRequest badRequest2 = new SearchTemplateRequest();
        badRequest2.setRequest(new SearchRequest("index"));
        badRequest2.setScriptType(ScriptType.INLINE);
        badRequest2.setScript("BAD QUERY TEMPLATE");
        scriptParams = new HashMap<>();
        scriptParams.put("number", "BAD NUMBER");
        badRequest2.setScriptParams(scriptParams);

        multiSearchTemplateRequest.add(badRequest2);

        // The whole HTTP request should fail if no nested search requests are valid
        ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
            () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
                highLevelClient()::multiSearchTemplateAsync));

        assertEquals(RestStatus.BAD_REQUEST, exception.status());
        assertThat(exception.getMessage(), containsString("no requests added"));
    }

    public void testExplain() throws IOException {
        {
            ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1");
            explainRequest.query(QueryBuilders.matchAllQuery());

            ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

            assertThat(explainResponse.getIndex(), equalTo("index1"));
            assertThat(explainResponse.getType(), equalTo("doc"));
            assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
            assertTrue(explainResponse.isExists());
            assertTrue(explainResponse.isMatch());
            assertTrue(explainResponse.hasExplanation());
            assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
            assertNull(explainResponse.getGetResult());
        }
        {
            ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1");
            explainRequest.query(QueryBuilders.termQuery("field", "value1"));

            ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

            assertThat(explainResponse.getIndex(), equalTo("index1"));
            assertThat(explainResponse.getType(), equalTo("doc"));
            assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
            assertTrue(explainResponse.isExists());
            assertTrue(explainResponse.isMatch());
            assertTrue(explainResponse.hasExplanation());
            assertThat(explainResponse.getExplanation().getValue(), greaterThan(0.0f));
            assertNull(explainResponse.getGetResult());
        }
        {
            ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1");
            explainRequest.query(QueryBuilders.termQuery("field", "value2"));

            ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

            assertThat(explainResponse.getIndex(), equalTo("index1"));
            assertThat(explainResponse.getType(), equalTo("doc"));
            assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
            assertTrue(explainResponse.isExists());
            assertFalse(explainResponse.isMatch());
            assertTrue(explainResponse.hasExplanation());
            assertNull(explainResponse.getGetResult());
        }
        {
            ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1");
            explainRequest.query(QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("field", "value1"))
                .must(QueryBuilders.termQuery("field", "value2")));

            ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

            assertThat(explainResponse.getIndex(), equalTo("index1"));
            assertThat(explainResponse.getType(), equalTo("doc"));
            assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
            assertTrue(explainResponse.isExists());
            assertFalse(explainResponse.isMatch());
            assertTrue(explainResponse.hasExplanation());
            assertThat(explainResponse.getExplanation().getDetails().length, equalTo(2));
            assertNull(explainResponse.getGetResult());
        }
    }

    public void testExplainNonExistent() throws IOException {
        {
            ExplainRequest explainRequest = new ExplainRequest("non_existent_index", "doc", "1");
            explainRequest.query(QueryBuilders.matchQuery("field", "value"));
            ElasticsearchException exception = expectThrows(ElasticsearchException.class,
                () -> execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync));
            assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND));
            assertThat(exception.getIndex().getName(), equalTo("non_existent_index"));
            assertThat(exception.getDetailedMessage(),
                containsString("Elasticsearch exception [type=index_not_found_exception, reason=no such index]"));
        }
        {
            ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "999");
            explainRequest.query(QueryBuilders.matchQuery("field", "value1"));

            ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

            assertThat(explainResponse.getIndex(), equalTo("index1"));
            assertThat(explainResponse.getType(), equalTo("doc"));
            assertThat(explainResponse.getId(), equalTo("999"));
            assertFalse(explainResponse.isExists());
            assertFalse(explainResponse.isMatch());
            assertFalse(explainResponse.hasExplanation());
            assertNull(explainResponse.getGetResult());
        }
    }

    public void testExplainWithStoredFields() throws IOException {
        {
            ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1");
            explainRequest.query(QueryBuilders.matchAllQuery());
            explainRequest.storedFields(new String[]{"field1"});

            ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

            assertTrue(explainResponse.isExists());
            assertTrue(explainResponse.isMatch());
            assertTrue(explainResponse.hasExplanation());
            assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
            assertTrue(explainResponse.getGetResult().isExists());
            assertThat(explainResponse.getGetResult().getFields().keySet(), equalTo(Collections.singleton("field1")));
            assertThat(explainResponse.getGetResult().getFields().get("field1").getValue().toString(), equalTo("value1"));
            assertTrue(explainResponse.getGetResult().isSourceEmpty());
        }
        {
            ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1");
            explainRequest.query(QueryBuilders.matchAllQuery());
            explainRequest.storedFields(new String[]{"field1", "field2"});

            ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

            assertTrue(explainResponse.isExists());
            assertTrue(explainResponse.isMatch());
            assertTrue(explainResponse.hasExplanation());
            assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
            assertTrue(explainResponse.getGetResult().isExists());
            assertThat(explainResponse.getGetResult().getFields().keySet().size(), equalTo(2));
            assertThat(explainResponse.getGetResult().getFields().get("field1").getValue().toString(), equalTo("value1"));
            assertThat(explainResponse.getGetResult().getFields().get("field2").getValue().toString(), equalTo("value2"));
            assertTrue(explainResponse.getGetResult().isSourceEmpty());
        }
    }

    public void testExplainWithFetchSource() throws IOException {
        {
            ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1");
            explainRequest.query(QueryBuilders.matchAllQuery());
            explainRequest.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1"}, null));

            ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

            assertTrue(explainResponse.isExists());
            assertTrue(explainResponse.isMatch());
            assertTrue(explainResponse.hasExplanation());
            assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
            assertTrue(explainResponse.getGetResult().isExists());
            assertThat(explainResponse.getGetResult().getSource(), equalTo(Collections.singletonMap("field1", "value1")));
        }
        {
            ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1");
            explainRequest.query(QueryBuilders.matchAllQuery());
            explainRequest.fetchSourceContext(new FetchSourceContext(true, null, new String[] {"field2"}));

            ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

            assertTrue(explainResponse.isExists());
            assertTrue(explainResponse.isMatch());
            assertTrue(explainResponse.hasExplanation());
            assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f));
            assertTrue(explainResponse.getGetResult().isExists());
            assertThat(explainResponse.getGetResult().getSource(), equalTo(Collections.singletonMap("field1", "value1")));
        }
    }

    public void testExplainWithAliasFilter() throws IOException {
        ExplainRequest explainRequest = new ExplainRequest("alias4", "doc", "1");
        explainRequest.query(QueryBuilders.matchAllQuery());

        ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);

        assertTrue(explainResponse.isExists());
        assertFalse(explainResponse.isMatch());
    }

    public void testFieldCaps() throws IOException {
        FieldCapabilitiesRequest request = new FieldCapabilitiesRequest()
@@ -28,15 +28,20 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.rest.RestStatus;

import java.io.IOException;
import java.util.Locale;
import java.util.stream.Collectors;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;

public class SnapshotIT extends ESRestHighLevelClientTestCase {
@@ -49,12 +54,12 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
            highLevelClient().snapshot()::createRepositoryAsync);
    }

    private CreateSnapshotResponse createTestSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
        // assumes the repository already exists

        return execute(createSnapshotRequest, highLevelClient().snapshot()::createSnapshot,
            highLevelClient().snapshot()::createSnapshotAsync);
    }

    public void testCreateRepository() throws IOException {
        PutRepositoryResponse response = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}");
@ -119,6 +124,55 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
|
|||
assertThat(response.getNodes().size(), equalTo(1));
|
||||
}
|
||||
|
||||
public void testCreateSnapshot() throws IOException {
|
||||
String repository = "test_repository";
|
||||
assertTrue(createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged());
|
||||
|
||||
String snapshot = "test_snapshot";
|
||||
CreateSnapshotRequest request = new CreateSnapshotRequest(repository, snapshot);
|
||||
boolean waitForCompletion = randomBoolean();
|
||||
request.waitForCompletion(waitForCompletion);
|
||||
request.partial(randomBoolean());
|
||||
request.includeGlobalState(randomBoolean());
|
||||
|
||||
CreateSnapshotResponse response = createTestSnapshot(request);
|
||||
assertEquals(waitForCompletion ? RestStatus.OK : RestStatus.ACCEPTED, response.status());
|
||||
}
|
||||
|
||||
public void testGetSnapshots() throws IOException {
|
||||
String repository = "test_repository";
|
||||
String snapshot1 = "test_snapshot1";
|
||||
String snapshot2 = "test_snapshot2";
|
||||
|
||||
PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}");
|
||||
assertTrue(putRepositoryResponse.isAcknowledged());
|
||||
|
||||
CreateSnapshotRequest createSnapshotRequest1 = new CreateSnapshotRequest(repository, snapshot1);
|
||||
createSnapshotRequest1.waitForCompletion(true);
|
||||
CreateSnapshotResponse putSnapshotResponse1 = createTestSnapshot(createSnapshotRequest1);
|
||||
CreateSnapshotRequest createSnapshotRequest2 = new CreateSnapshotRequest(repository, snapshot2);
|
||||
createSnapshotRequest2.waitForCompletion(true);
|
||||
CreateSnapshotResponse putSnapshotResponse2 = createTestSnapshot(createSnapshotRequest2);
|
||||
// check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
|
||||
assertEquals(RestStatus.OK, putSnapshotResponse1.status());
|
||||
assertEquals(RestStatus.OK, putSnapshotResponse2.status());
|
||||
|
||||
GetSnapshotsRequest request;
|
||||
if (randomBoolean()) {
|
||||
request = new GetSnapshotsRequest(repository);
|
||||
} else if (randomBoolean()) {
|
||||
request = new GetSnapshotsRequest(repository, new String[] {"_all"});
|
||||
|
||||
} else {
|
||||
request = new GetSnapshotsRequest(repository, new String[] {snapshot1, snapshot2});
|
||||
}
|
||||
GetSnapshotsResponse response = execute(request, highLevelClient().snapshot()::get, highLevelClient().snapshot()::getAsync);
|
||||
|
||||
assertEquals(2, response.getSnapshots().size());
|
||||
assertThat(response.getSnapshots().stream().map((s) -> s.snapshotId().getName()).collect(Collectors.toList()),
|
||||
contains("test_snapshot1", "test_snapshot2"));
|
||||
}
|
||||
|
||||
public void testDeleteSnapshot() throws IOException {
|
||||
String repository = "test_repository";
|
||||
String snapshot = "test_snapshot";
|
||||
|
@ -126,9 +180,11 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
|
|||
PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}");
|
||||
assertTrue(putRepositoryResponse.isAcknowledged());
|
||||
|
||||
Response putSnapshotResponse = createTestSnapshot(repository, snapshot);
|
||||
CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot);
|
||||
createSnapshotRequest.waitForCompletion(true);
|
||||
CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest);
|
||||
// check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
|
||||
assertEquals(200, putSnapshotResponse.getStatusLine().getStatusCode());
|
||||
assertEquals(RestStatus.OK, createSnapshotResponse.status());
|
||||
|
||||
DeleteSnapshotRequest request = new DeleteSnapshotRequest(repository, snapshot);
|
||||
DeleteSnapshotResponse response = execute(request, highLevelClient().snapshot()::delete, highLevelClient().snapshot()::deleteAsync);
|
||||
|
|
|
@@ -19,12 +19,15 @@
package org.elasticsearch.client.documentation;

import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;

@@ -47,10 +50,12 @@ import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

@@ -66,6 +71,9 @@ import org.elasticsearch.index.rankeval.RatedRequest;
import org.elasticsearch.index.rankeval.RatedSearchHit;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse;
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse.Item;
import org.elasticsearch.script.mustache.SearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateResponse;
import org.elasticsearch.search.Scroll;

@@ -80,6 +88,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.profile.ProfileResult;

@@ -767,21 +776,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
        RestHighLevelClient client = highLevelClient();
        RestClient restClient = client();

        // tag::register-script
        Request scriptRequest = new Request("POST", "_scripts/title_search");
        scriptRequest.setJsonEntity(
            "{" +
            "  \"script\": {" +
            "    \"lang\": \"mustache\"," +
            "    \"source\": {" +
            "      \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
            "      \"size\" : \"{{size}}\"" +
            "    }" +
            "  }" +
            "}");
        Response scriptResponse = restClient.performRequest(scriptRequest);
        // end::register-script
        assertEquals(RestStatus.OK.getStatus(), scriptResponse.getStatusLine().getStatusCode());
        registerQueryScript(restClient);

        // tag::search-template-request-stored
        SearchTemplateRequest request = new SearchTemplateRequest();

@@ -834,6 +829,223 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }

    public void testMultiSearchTemplateWithInlineScript() throws Exception {
        indexSearchTestData();
        RestHighLevelClient client = highLevelClient();

        // tag::multi-search-template-request-inline
        String [] searchTerms = {"elasticsearch", "logstash", "kibana"};

        MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); // <1>
        for (String searchTerm : searchTerms) {
            SearchTemplateRequest request = new SearchTemplateRequest(); // <2>
            request.setRequest(new SearchRequest("posts"));

            request.setScriptType(ScriptType.INLINE);
            request.setScript(
                "{" +
                "  \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
                "  \"size\" : \"{{size}}\"" +
                "}");

            Map<String, Object> scriptParams = new HashMap<>();
            scriptParams.put("field", "title");
            scriptParams.put("value", searchTerm);
            scriptParams.put("size", 5);
            request.setScriptParams(scriptParams);

            multiRequest.add(request); // <3>
        }
        // end::multi-search-template-request-inline

        // tag::multi-search-template-request-sync
        MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT);
        // end::multi-search-template-request-sync

        // tag::multi-search-template-response
        for (Item item : multiResponse.getResponses()) { // <1>
            if (item.isFailure()) {
                String error = item.getFailureMessage(); // <2>
            } else {
                SearchTemplateResponse searchTemplateResponse = item.getResponse(); // <3>
                SearchResponse searchResponse = searchTemplateResponse.getResponse();
                searchResponse.getHits();
            }
        }
        // end::multi-search-template-response

        assertNotNull(multiResponse);
        assertEquals(searchTerms.length, multiResponse.getResponses().length);
        assertNotNull(multiResponse.getResponses()[0]);
        SearchResponse searchResponse = multiResponse.getResponses()[0].getResponse().getResponse();
        assertTrue(searchResponse.getHits().totalHits > 0);

    }

    public void testMultiSearchTemplateWithStoredScript() throws Exception {
        indexSearchTestData();
        RestHighLevelClient client = highLevelClient();
        RestClient restClient = client();

        registerQueryScript(restClient);

        // tag::multi-search-template-request-stored
        MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();

        String [] searchTerms = {"elasticsearch", "logstash", "kibana"};
        for (String searchTerm : searchTerms) {

            SearchTemplateRequest request = new SearchTemplateRequest();
            request.setRequest(new SearchRequest("posts"));

            request.setScriptType(ScriptType.STORED);
            request.setScript("title_search");

            Map<String, Object> params = new HashMap<>();
            params.put("field", "title");
            params.put("value", searchTerm);
            params.put("size", 5);
            request.setScriptParams(params);
            multiRequest.add(request);
        }
        // end::multi-search-template-request-stored

        // tag::multi-search-template-execute
        MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT);
        // end::multi-search-template-execute

        assertNotNull(multiResponse);
        assertEquals(searchTerms.length, multiResponse.getResponses().length);
        assertNotNull(multiResponse.getResponses()[0]);
        SearchResponse searchResponse = multiResponse.getResponses()[0].getResponse().getResponse();
        assertTrue(searchResponse.getHits().totalHits > 0);

        // tag::multi-search-template-execute-listener
        ActionListener<MultiSearchTemplateResponse> listener = new ActionListener<MultiSearchTemplateResponse>() {
            @Override
            public void onResponse(MultiSearchTemplateResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::multi-search-template-execute-listener

        // Replace the empty listener by a blocking listener for tests.
        CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::multi-search-template-execute-async
        client.multiSearchTemplateAsync(multiRequest, RequestOptions.DEFAULT, listener);
        // end::multi-search-template-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }

    protected void registerQueryScript(RestClient restClient) throws IOException {
        // tag::register-script
        Request scriptRequest = new Request("POST", "_scripts/title_search");
        scriptRequest.setJsonEntity(
            "{" +
            "  \"script\": {" +
            "    \"lang\": \"mustache\"," +
            "    \"source\": {" +
            "      \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
            "      \"size\" : \"{{size}}\"" +
            "    }" +
            "  }" +
            "}");
        Response scriptResponse = restClient.performRequest(scriptRequest);
        // end::register-script
        assertEquals(RestStatus.OK.getStatus(), scriptResponse.getStatusLine().getStatusCode());
    }

    public void testExplain() throws Exception {
        indexSearchTestData();
        RestHighLevelClient client = highLevelClient();

        // tag::explain-request
        ExplainRequest request = new ExplainRequest("contributors", "doc", "1");
        request.query(QueryBuilders.termQuery("user", "tanguy"));
        // end::explain-request

        // tag::explain-request-routing
        request.routing("routing"); // <1>
        // end::explain-request-routing

        // tag::explain-request-preference
        request.preference("_local"); // <1>
        // end::explain-request-preference

        // tag::explain-request-source
        request.fetchSourceContext(new FetchSourceContext(true, new String[]{"user"}, null)); // <1>
        // end::explain-request-source

        // tag::explain-request-stored-field
        request.storedFields(new String[]{"user"}); // <1>
        // end::explain-request-stored-field

        // tag::explain-execute
        ExplainResponse response = client.explain(request, RequestOptions.DEFAULT);
        // end::explain-execute

        // tag::explain-response
        String index = response.getIndex(); // <1>
        String type = response.getType(); // <2>
        String id = response.getId(); // <3>
        boolean exists = response.isExists(); // <4>
        boolean match = response.isMatch(); // <5>
        boolean hasExplanation = response.hasExplanation(); // <6>
        Explanation explanation = response.getExplanation(); // <7>
        GetResult getResult = response.getGetResult(); // <8>
        // end::explain-response
        assertThat(index, equalTo("contributors"));
        assertThat(type, equalTo("doc"));
        assertThat(id, equalTo("1"));
        assertTrue(exists);
        assertTrue(match);
        assertTrue(hasExplanation);
        assertNotNull(explanation);
        assertNotNull(getResult);

        // tag::get-result
        Map<String, Object> source = getResult.getSource(); // <1>
        Map<String, DocumentField> fields = getResult.getFields(); // <2>
        // end::get-result
        assertThat(source, equalTo(Collections.singletonMap("user", "tanguy")));
        assertThat(fields.get("user").getValue(), equalTo("tanguy"));

        // tag::explain-execute-listener
        ActionListener<ExplainResponse> listener = new ActionListener<ExplainResponse>() {
            @Override
            public void onResponse(ExplainResponse explainResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::explain-execute-listener

        CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::explain-execute-async
        client.explainAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::explain-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }

    public void testFieldCaps() throws Exception {
        indexSearchTestData();

@@ -1046,7 +1258,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
        assertTrue(authorsResponse.isAcknowledged());

        CreateIndexRequest reviewersRequest = new CreateIndexRequest("contributors")
            .mapping("doc", "user", "type=keyword");
            .mapping("doc", "user", "type=keyword,store=true");
        CreateIndexResponse reviewersResponse = highLevelClient().indices().create(reviewersRequest, RequestOptions.DEFAULT);
        assertTrue(reviewersResponse.isAcknowledged());

@@ -29,6 +29,12 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;

@@ -41,6 +47,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.snapshots.SnapshotInfo;

import java.io.IOException;
import java.util.HashMap;

@@ -367,6 +375,164 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
        }
    }

    public void testSnapshotCreate() throws IOException {
        RestHighLevelClient client = highLevelClient();

        CreateIndexRequest createIndexRequest = new CreateIndexRequest("test-index0");
        client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
        createIndexRequest = new CreateIndexRequest("test-index1");
        client.indices().create(createIndexRequest, RequestOptions.DEFAULT);

        createTestRepositories();

        // tag::create-snapshot-request
        CreateSnapshotRequest request = new CreateSnapshotRequest();
        // end::create-snapshot-request

        // tag::create-snapshot-request-repositoryName
        request.repository(repositoryName); // <1>
        // end::create-snapshot-request-repositoryName
        // tag::create-snapshot-request-snapshotName
        request.snapshot(snapshotName); // <1>
        // end::create-snapshot-request-snapshotName
        // tag::create-snapshot-request-indices
        request.indices("test-index0", "test-index1"); // <1>
        // end::create-snapshot-request-indices
        // tag::create-snapshot-request-indicesOptions
        request.indicesOptions(IndicesOptions.fromOptions(false, false, true, true)); // <1>
        // end::create-snapshot-request-indicesOptions
        // tag::create-snapshot-request-partial
        request.partial(false); // <1>
        // end::create-snapshot-request-partial
        // tag::create-snapshot-request-includeGlobalState
        request.includeGlobalState(true); // <1>
        // end::create-snapshot-request-includeGlobalState

        // tag::create-snapshot-request-masterTimeout
        request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
        request.masterNodeTimeout("1m"); // <2>
        // end::create-snapshot-request-masterTimeout
        // tag::create-snapshot-request-waitForCompletion
        request.waitForCompletion(true); // <1>
        // end::create-snapshot-request-waitForCompletion

        // tag::create-snapshot-execute
        CreateSnapshotResponse response = client.snapshot().createSnapshot(request, RequestOptions.DEFAULT);
        // end::create-snapshot-execute

        // tag::create-snapshot-response
        RestStatus status = response.status(); // <1>
        // end::create-snapshot-response

        assertEquals(RestStatus.OK, status);
    }

    public void testSnapshotCreateAsync() throws InterruptedException {
        RestHighLevelClient client = highLevelClient();
        {
            CreateSnapshotRequest request = new CreateSnapshotRequest(repositoryName, snapshotName);

            // tag::create-snapshot-execute-listener
            ActionListener<CreateSnapshotResponse> listener =
                new ActionListener<CreateSnapshotResponse>() {
                    @Override
                    public void onResponse(CreateSnapshotResponse createSnapshotResponse) {
                        // <1>
                    }

                    @Override
                    public void onFailure(Exception exception) {
                        // <2>
                    }
                };
            // end::create-snapshot-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::create-snapshot-execute-async
            client.snapshot().createSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::create-snapshot-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    public void testSnapshotGetSnapshots() throws IOException {
        RestHighLevelClient client = highLevelClient();

        createTestRepositories();
        createTestSnapshots();

        // tag::get-snapshots-request
        GetSnapshotsRequest request = new GetSnapshotsRequest();
        // end::get-snapshots-request

        // tag::get-snapshots-request-repositoryName
        request.repository(repositoryName); // <1>
        // end::get-snapshots-request-repositoryName

        // tag::get-snapshots-request-snapshots
        String[] snapshots = { snapshotName };
        request.snapshots(snapshots); // <1>
        // end::get-snapshots-request-snapshots

        // tag::get-snapshots-request-masterTimeout
        request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
        request.masterNodeTimeout("1m"); // <2>
        // end::get-snapshots-request-masterTimeout

        // tag::get-snapshots-request-verbose
        request.verbose(true); // <1>
        // end::get-snapshots-request-verbose

        // tag::get-snapshots-request-ignore-unavailable
        request.ignoreUnavailable(false); // <1>
        // end::get-snapshots-request-ignore-unavailable

        // tag::get-snapshots-execute
        GetSnapshotsResponse response = client.snapshot().get(request, RequestOptions.DEFAULT);
        // end::get-snapshots-execute

        // tag::get-snapshots-response
        List<SnapshotInfo> snapshotsInfos = response.getSnapshots(); // <1>
        // end::get-snapshots-response
        assertEquals(1, snapshotsInfos.size());
    }

    public void testSnapshotGetSnapshotsAsync() throws InterruptedException {
        RestHighLevelClient client = highLevelClient();
        {
            GetSnapshotsRequest request = new GetSnapshotsRequest(repositoryName);

            // tag::get-snapshots-execute-listener
            ActionListener<GetSnapshotsResponse> listener =
                new ActionListener<GetSnapshotsResponse>() {
                    @Override
                    public void onResponse(GetSnapshotsResponse getSnapshotsResponse) {
                        // <1>
                    }

                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-snapshots-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::get-snapshots-execute-async
            client.snapshot().getAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-snapshots-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    public void testSnapshotDeleteSnapshot() throws IOException {
        RestHighLevelClient client = highLevelClient();

@@ -615,16 +615,16 @@ public class RestClient implements Closeable {
     */
    private NodeTuple<Iterator<Node>> nextNode() throws IOException {
        NodeTuple<List<Node>> nodeTuple = this.nodeTuple;
        List<Node> hosts = selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector);
        Iterable<Node> hosts = selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector);
        return new NodeTuple<>(hosts.iterator(), nodeTuple.authCache);
    }

    /**
     * Select hosts to try. Package private for testing.
     * Select nodes to try and sorts them so that the first one will be tried initially, then the following ones
     * if the previous attempt failed and so on. Package private for testing.
     */
    static List<Node> selectHosts(NodeTuple<List<Node>> nodeTuple,
            Map<HttpHost, DeadHostState> blacklist, AtomicInteger lastNodeIndex,
            NodeSelector nodeSelector) throws IOException {
    static Iterable<Node> selectNodes(NodeTuple<List<Node>> nodeTuple, Map<HttpHost, DeadHostState> blacklist,
                                      AtomicInteger lastNodeIndex, NodeSelector nodeSelector) throws IOException {
        /*
         * Sort the nodes into living and dead lists.
         */

@@ -653,8 +653,8 @@ public class RestClient implements Closeable {
            nodeSelector.select(selectedLivingNodes);
            if (false == selectedLivingNodes.isEmpty()) {
                /*
                 * Rotate the list so subsequent requests will prefer the
                 * nodes in a different order.
                 * Rotate the list using a global counter as the distance so subsequent
                 * requests will try the nodes in a different order.
                 */
                Collections.rotate(selectedLivingNodes, lastNodeIndex.getAndIncrement());
                return selectedLivingNodes;

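The rotation above is easier to see in isolation. A minimal standalone sketch (a hypothetical demo class, not part of this change) of how rotating a copy of the node list by an ever-increasing counter yields round-robin ordering across requests:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical demo class, not part of RestClient.
public class RotateDemo {
    public static void main(String[] args) {
        AtomicInteger lastNodeIndex = new AtomicInteger(0);
        List<String> nodes = Arrays.asList("n1", "n2", "n3");
        for (int request = 0; request < 4; request++) {
            List<String> ordered = new ArrayList<>(nodes);
            Collections.rotate(ordered, lastNodeIndex.getAndIncrement());
            // prints [n1, n2, n3], [n3, n1, n2], [n2, n3, n1], [n1, n2, n3]
            System.out.println(ordered);
        }
    }
}
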
@@ -662,15 +662,13 @@ public class RestClient implements Closeable {
        }

        /*
         * Last resort: If there are no good nodes to use, either because
         * Last resort: there are no good nodes to use, either because
         * the selector rejected all the living nodes or because there aren't
         * any living ones. Either way, we want to revive a single dead node
         * that the NodeSelectors are OK with. We do this by sorting the dead
         * nodes by their revival time and passing them through the
         * NodeSelector so it can have its say in which nodes are ok and their
         * ordering. If the selector is ok with any of the nodes then use just
         * the first one in the list because we only want to revive a single
         * node.
         * that the NodeSelectors are OK with. We do this by passing the dead
         * nodes through the NodeSelector so it can have its say in which nodes
         * are ok. If the selector is ok with any of the nodes then we will take
         * the one in the list that has the lowest revival time and try it.
         */
        if (false == deadNodes.isEmpty()) {
            final List<DeadNode> selectedDeadNodes = new ArrayList<>(deadNodes);

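Condensed into a standalone sketch, the last-resort selection described in the comment above amounts to the following (the `DeadNode` class and `selectorOk` predicate here are simplified stand-ins for the client's internal type and the `NodeSelector` check):

import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.function.Predicate;

// Simplified stand-in for RestClient's internal dead-node bookkeeping.
class DeadNodeSketch {
    static final class DeadNode {
        final String name;
        final long revivalTimeMillis;
        DeadNode(String name, long revivalTimeMillis) {
            this.name = name;
            this.revivalTimeMillis = revivalTimeMillis;
        }
    }

    // Pick the single dead node to revive: selector-approved, lowest revival time.
    static Optional<DeadNode> reviveCandidate(List<DeadNode> deadNodes, Predicate<DeadNode> selectorOk) {
        return deadNodes.stream()
                .filter(selectorOk)
                .min(Comparator.comparingLong(d -> d.revivalTimeMillis));
    }
}
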
@@ -1010,8 +1008,8 @@ public class RestClient implements Closeable {
    }

    /**
     * Adapts an <code>Iterator<DeadNodeAndRevival></code> into an
     * <code>Iterator<Node></code>.
     * Adapts an <code>Iterator&lt;DeadNodeAndRevival&gt;</code> into an
     * <code>Iterator&lt;Node&gt;</code>.
     */
    private static class DeadNodeIteratorAdapter implements Iterator<Node> {
        private final Iterator<DeadNode> itr;

@@ -314,7 +314,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
    }

    /**
     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddHeaders()}.
     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}.
     */
    @Deprecated
    public void tesPerformRequestOldStyleNullHeaders() throws IOException {

@@ -21,6 +21,9 @@ package org.elasticsearch.client;

import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.client.AuthCache;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.elasticsearch.client.DeadHostStateTests.ConfigurableTimeSupplier;
import org.elasticsearch.client.RestClient.NodeTuple;

@@ -35,13 +38,14 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static java.util.Collections.singletonList;
import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

@@ -141,7 +145,7 @@ public class RestClientTests extends RestClientTestCase {
    }

    /**
     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddHeader()}.
     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}.
     */
    @Deprecated
    public void testPerformOldStyleAsyncWithNullHeaders() throws Exception {

@@ -407,8 +411,8 @@ public class RestClientTests extends RestClientTestCase {
         * blacklist time. It'll revive the node that is closest
         * to being revived that the NodeSelector is ok with.
         */
        assertEquals(singletonList(n1), RestClient.selectHosts(nodeTuple, blacklist, new AtomicInteger(), NodeSelector.ANY));
        assertEquals(singletonList(n2), RestClient.selectHosts(nodeTuple, blacklist, new AtomicInteger(), not1));
        assertEquals(singletonList(n1), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), NodeSelector.ANY));
        assertEquals(singletonList(n2), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), not1));

        /*
         * Try a NodeSelector that excludes all nodes. This should

@@ -449,23 +453,23 @@
            Map<HttpHost, DeadHostState> blacklist, NodeSelector nodeSelector) throws IOException {
        int iterations = 1000;
        AtomicInteger lastNodeIndex = new AtomicInteger(0);
        assertEquals(expectedNodes, RestClient.selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector));
        assertEquals(expectedNodes, RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector));
        // Calling it again rotates the set of results
        for (int i = 1; i < iterations; i++) {
            Collections.rotate(expectedNodes, 1);
            assertEquals("iteration " + i, expectedNodes,
                RestClient.selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector));
                RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector));
        }
    }

    /**
     * Assert that {@link RestClient#selectHosts} fails on the provided arguments.
     * Assert that {@link RestClient#selectNodes} fails on the provided arguments.
     * @return the message in the exception thrown by the failure
     */
    private String assertSelectAllRejected( NodeTuple<List<Node>> nodeTuple,
    private static String assertSelectAllRejected( NodeTuple<List<Node>> nodeTuple,
            Map<HttpHost, DeadHostState> blacklist, NodeSelector nodeSelector) {
        try {
            RestClient.selectHosts(nodeTuple, blacklist, new AtomicInteger(0), nodeSelector);
            RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(0), nodeSelector);
            throw new AssertionError("expected selectHosts to fail");
        } catch (IOException e) {
            return e.getMessage();

@@ -478,5 +482,56 @@ public class RestClientTests extends RestClientTestCase {
            new Header[] {}, nodes, null, null, null);
    }

    public void testRoundRobin() throws IOException {
        int numNodes = randomIntBetween(2, 10);
        AuthCache authCache = new BasicAuthCache();
        List<Node> nodes = new ArrayList<>(numNodes);
        for (int i = 0; i < numNodes; i++) {
            Node node = new Node(new HttpHost("localhost", 9200 + i));
            nodes.add(node);
            authCache.put(node.getHost(), new BasicScheme());
        }
        NodeTuple<List<Node>> nodeTuple = new NodeTuple<>(nodes, authCache);

        //test the transition from negative to positive values
        AtomicInteger lastNodeIndex = new AtomicInteger(-numNodes);
        assertNodes(nodeTuple, lastNodeIndex, 50);
        assertEquals(-numNodes + 50, lastNodeIndex.get());

        //test the highest positive values up to MAX_VALUE
        lastNodeIndex.set(Integer.MAX_VALUE - numNodes * 10);
        assertNodes(nodeTuple, lastNodeIndex, numNodes * 10);
        assertEquals(Integer.MAX_VALUE, lastNodeIndex.get());

        //test the transition from MAX_VALUE to MIN_VALUE
        //this is the only time where there is most likely going to be a jump from a node
        //to another one that's not necessarily the next one.
        assertEquals(Integer.MIN_VALUE, lastNodeIndex.incrementAndGet());
        assertNodes(nodeTuple, lastNodeIndex, 50);
        assertEquals(Integer.MIN_VALUE + 50, lastNodeIndex.get());
    }

    private static void assertNodes(NodeTuple<List<Node>> nodeTuple, AtomicInteger lastNodeIndex, int runs) throws IOException {
        int distance = lastNodeIndex.get() % nodeTuple.nodes.size();
        /*
         * Collections.rotate is not super intuitive: distance 1 means that the last element will become the first and so on,
         * while distance -1 means that the second element will become the first and so on.
         */
        int expectedOffset = distance > 0 ? nodeTuple.nodes.size() - distance : Math.abs(distance);
        for (int i = 0; i < runs; i++) {
            Iterable<Node> selectedNodes = RestClient.selectNodes(nodeTuple, Collections.<HttpHost, DeadHostState>emptyMap(),
                lastNodeIndex, NodeSelector.ANY);
            List<Node> expectedNodes = nodeTuple.nodes;
            int index = 0;
            for (Node actualNode : selectedNodes) {
                Node expectedNode = expectedNodes.get((index + expectedOffset) % expectedNodes.size());
                assertSame(expectedNode, actualNode);
                index++;
            }
            expectedOffset--;
            if (expectedOffset < 0) {
                expectedOffset += nodeTuple.nodes.size();
            }
        }
    }
}
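
The MAX_VALUE-to-MIN_VALUE transition exercised by testRoundRobin is plain int overflow; a small standalone illustration (not part of the test class) of why the rotate distance can jump at that boundary:

// assuming 3 nodes, so the rotate distance is counter % 3
int counter = Integer.MAX_VALUE;
System.out.println(counter % 3); // 1
counter++;                       // wraps around to Integer.MIN_VALUE
System.out.println(counter);     // -2147483648
System.out.println(counter % 3); // -2: the sign of the distance flips, so the
                                 // next node is not necessarily the successor
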
@@ -122,7 +122,7 @@ case "$1" in
		ulimit -l $MAX_LOCKED_MEMORY
	fi

	if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -ge $(cat /proc/sys/vm/max_map_count) ]; then
	if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -gt $(cat /proc/sys/vm/max_map_count) ]; then
		sysctl -q -w vm.max_map_count=$MAX_MAP_COUNT
	fi

@@ -90,7 +90,7 @@ start() {
    if [ -n "$MAX_LOCKED_MEMORY" ]; then
        ulimit -l $MAX_LOCKED_MEMORY
    fi
    if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -ge $(cat /proc/sys/vm/max_map_count) ]; then
    if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -gt $(cat /proc/sys/vm/max_map_count) ]; then
        sysctl -q -w vm.max_map_count=$MAX_MAP_COUNT
    fi

@@ -8,7 +8,8 @@ CONSOLE" and "COPY AS CURL" in the documentation and are automatically tested
by the command `gradle :docs:check`. To test just the docs from a single page,
use e.g. `gradle :docs:check -Dtests.method="\*rollover*"`.

NOTE: If you have an elasticsearch-extra folder alongside your elasticsearch folder, you must temporarily rename it when you are testing 6.3 or later branches.
NOTE: If you have an elasticsearch-extra folder alongside your elasticsearch
folder, you must temporarily rename it when you are testing 6.3 or later branches.

By default each `// CONSOLE` snippet runs as its own isolated test. You can
manipulate the test execution in the following ways:

@@ -36,7 +37,8 @@ for its modifiers:
  reason why the test shouldn't be run.
* `// TEST[setup:name]`: Run some setup code before running the snippet. This
  is useful for creating and populating indexes used in the snippet. The setup
  code is defined in `docs/build.gradle`.
  code is defined in `docs/build.gradle`. See `// TESTSETUP` below for a
  similar feature.
* `// TEST[warning:some warning]`: Expect the response to include a `Warning`
  header. If the response doesn't include a `Warning` header with the exact
  text then the test fails. If the response includes `Warning` headers that

@@ -68,7 +70,9 @@ for its modifiers:
  a test that runs the setup snippet first. See the "painless" docs for a file
  that puts this to good use. This is fairly similar to `// TEST[setup:name]`
  but rather than the setup defined in `docs/build.gradle` the setup is defined
  right in the documentation file.
  right in the documentation file. In general, we should prefer `// TESTSETUP`
  over `// TEST[setup:name]` because it makes it more clear what steps have to
  be taken before the examples will work.

In addition to the standard CONSOLE syntax these snippets can contain blocks
of yaml surrounded by markers like this:

@@ -63,6 +63,8 @@ buildRestTests.docs = fileTree(projectDir) {
  exclude 'README.asciidoc'
}

listSnippets.docs = buildRestTests.docs

Closure setupTwitter = { String name, int count ->
  buildRestTests.setups[name] = '''
  - do:

@@ -225,31 +227,6 @@ buildRestTests.doFirst {
  buildRestTests.setups['bank'].replace('#bank_data#', accounts)
}

buildRestTests.setups['range_index'] = '''
  - do :
      indices.create:
        index: range_index
        body:
          settings:
            number_of_shards: 2
            number_of_replicas: 1
          mappings:
            _doc:
              properties:
                expected_attendees:
                  type: integer_range
                time_frame:
                  type: date_range
                  format: yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis
  - do:
      bulk:
        index: range_index
        type: _doc
        refresh: true
        body: |
          {"index":{"_id": 1}}
          {"expected_attendees": {"gte": 10, "lte": 20}, "time_frame": {"gte": "2015-10-31 12:00:00", "lte": "2015-11-01"}}'''

// Used by index boost doc
buildRestTests.setups['index_boost'] = '''
  - do:

@@ -603,4 +580,4 @@ buildRestTests.setups['library'] = '''
    {"index":{"_id": "The Moon is a Harsh Mistress"}}
    {"name": "The Moon is a Harsh Mistress", "author": "Robert A. Heinlein", "release_date": "1966-04-01", "page_count": 288}

'''
'''

@@ -0,0 +1,113 @@
[[java-rest-high-explain]]
=== Explain API

The explain API computes a score explanation for a query and a specific document.
This can give useful feedback on whether a document matches or didn't match a specific query.

[[java-rest-high-explain-request]]
==== Explain Request

An `ExplainRequest` expects an `index`, a `type` and an `id` to specify a certain document,
and a query represented by a `QueryBuilder` to run against it
(see <<java-rest-high-query-builders, building queries>>).

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request]
--------------------------------------------------

===== Optional arguments

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-routing]
--------------------------------------------------
<1> Set a routing parameter

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-preference]
--------------------------------------------------
<1> Use the preference parameter e.g. to execute the search to prefer local
shards. The default is to randomize across shards.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-source]
--------------------------------------------------
<1> Set to true to retrieve the _source of the explained document. You can also
retrieve part of the document by using _source_include & _source_exclude
(see <<java-rest-high-document-get-request-optional-arguments, Get API>> for more details)

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-stored-field]
--------------------------------------------------
<1> Controls which stored fields to return as part of the explained document
(requires the field to be stored separately in the mappings).

[[java-rest-high-explain-sync]]
==== Synchronous Execution

The `explain` method executes the request synchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute]
--------------------------------------------------

[[java-rest-high-explain-async]]
==== Asynchronous Execution

The `explainAsync` method executes the request asynchronously,
calling the provided `ActionListener` when the response is ready:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute-async]
--------------------------------------------------
<1> The `ExplainRequest` to execute and the `ActionListener` to use when
the execution completes.

The asynchronous method does not block and returns immediately. Once the request
completes, the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for `ExplainResponse` is constructed as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed.
<2> Called when the whole `ExplainRequest` fails.

[[java-rest-high-explain-response]]
==== ExplainResponse

The `ExplainResponse` contains the following information:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-response]
--------------------------------------------------
<1> The index name of the explained document.
<2> The type name of the explained document.
<3> The id of the explained document.
<4> Indicates whether or not the explained document exists.
<5> Indicates whether or not there is a match between the explained document and
the provided query (the `match` is retrieved from the Lucene `Explanation` behind the scenes:
if the Lucene `Explanation` models a match it returns `true`, otherwise `false`).
<6> Indicates whether or not a Lucene `Explanation` exists for this request.
<7> Get the Lucene `Explanation` object if one exists.
<8> Get the `GetResult` object if the `_source` or the stored fields are retrieved.

The `GetResult` contains two maps internally to store the fetched `_source` and stored fields.
You can use the following methods to get them:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[get-result]
--------------------------------------------------
<1> Retrieve the `_source` as a map.
<2> Retrieve the specified stored fields as a map.
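
Putting the pieces above together, a minimal end-to-end sketch might look as follows (the index, type, id and query values are placeholders, and the client bootstrap is the usual high-level client construction):

["source","java"]
--------------------------------------------------
RestHighLevelClient client = new RestHighLevelClient(
        RestClient.builder(new HttpHost("localhost", 9200, "http")));

ExplainRequest request = new ExplainRequest("contributors", "doc", "1");
request.query(QueryBuilders.termQuery("user", "tanguy"));

ExplainResponse response = client.explain(request, RequestOptions.DEFAULT);
if (response.isMatch() && response.hasExplanation()) {
    System.out.println(response.getExplanation()); // the Lucene score explanation
}
--------------------------------------------------
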
@@ -0,0 +1,81 @@
[[java-rest-high-multi-search-template]]
=== Multi-Search-Template API

The `multiSearchTemplate` API executes multiple <<java-rest-high-search-template,`search template`>>
requests in a single HTTP request in parallel.

[[java-rest-high-multi-search-template-request]]
==== Multi-Search-Template Request

The `MultiSearchTemplateRequest` is built empty and you add all of the searches that
you wish to execute to it:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-request-inline]
--------------------------------------------------
<1> Create an empty `MultiSearchTemplateRequest`.
<2> Create one or more `SearchTemplateRequest` objects and populate them just like you
would for a regular <<java-rest-high-search-template,`search template`>>.
<3> Add the `SearchTemplateRequest` to the `MultiSearchTemplateRequest`.

===== Optional arguments

The `max_concurrent_searches` request parameter can be used to control
the maximum number of concurrent searches the multi search API will execute.
The default is based on the number of data nodes and the default search thread pool size.
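
For example, assuming the Java request exposes this parameter through a `maxConcurrentSearchRequests` setter (mirroring the plain multi search request), capping concurrency could look like:

["source","java"]
--------------------------------------------------
MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();
// hypothetical cap: run at most 2 of the added search templates concurrently
multiRequest.maxConcurrentSearchRequests(2);
--------------------------------------------------
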
[[java-rest-high-multi-search-template-sync]]
==== Synchronous Execution

The `multiSearchTemplate` method executes `MultiSearchTemplateRequest`s synchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-request-sync]
--------------------------------------------------

[[java-rest-high-multi-search-template-async]]
==== Asynchronous Execution

The `multiSearchTemplateAsync` method executes `MultiSearchTemplateRequest`s asynchronously,
calling the provided `ActionListener` when the response is ready.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-execute-async]
--------------------------------------------------
The parameters are the `MultiSearchTemplateRequest` to execute and the `ActionListener` to use when
the execution completes.

The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for `MultiSearchTemplateResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed.
<2> Called when the whole `MultiSearchTemplateRequest` fails.

==== MultiSearchTemplateResponse

The `MultiSearchTemplateResponse` that is returned by executing the `multiSearchTemplate` method contains
a `MultiSearchTemplateResponse.Item` for each `SearchTemplateRequest` in the
`MultiSearchTemplateRequest`. Each `MultiSearchTemplateResponse.Item` contains an
exception in `getFailure` if the request failed or a
<<java-rest-high-search-response,`SearchResponse`>> in `getResponse` if
the request succeeded:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-response]
--------------------------------------------------
<1> An array of responses is returned - one response for each request.
<2> Failed search template requests have error messages.
<3> Successful requests contain a <<java-rest-high-search-response,`SearchResponse`>> in
`getResponse`.
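
For instance, a caller might first tally failures and total hits across all items; a short sketch based on the accessors above:

["source","java"]
--------------------------------------------------
int failures = 0;
long totalHits = 0;
for (Item item : multiResponse.getResponses()) {
    if (item.isFailure()) {
        failures++; // details available via item.getFailureMessage()
    } else {
        totalHits += item.getResponse().getResponse().getHits().totalHits;
    }
}
--------------------------------------------------
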
@ -0,0 +1,121 @@
|
|||
[[java-rest-high-snapshot-create-snapshot]]
|
||||
=== Create Snapshot API
|
||||
|
||||
Use the Create Snapshot API to create a new snapshot.
|
||||
|
||||
[[java-rest-high-snapshot-create-snapshot-request]]
|
||||
==== Create Snapshot Request
|
||||
|
||||
A `CreateSnapshotRequest`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request]
|
||||
--------------------------------------------------
|
||||
|
||||
==== Required Arguments
|
||||
The following arguments are mandatory:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-repositoryName]
|
||||
--------------------------------------------------
|
||||
<1> The name of the repository.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-snapshotName]
|
||||
--------------------------------------------------
|
||||
<1> The name of the snapshot.
|
||||
|
||||
==== Optional Arguments
|
||||
The following arguments are optional:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-indices]
|
||||
--------------------------------------------------
|
||||
<1> A list of indices the snapshot is applied to.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-indicesOptions]
|
||||
--------------------------------------------------
|
||||
<1> Options applied to the indices.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-partial]
--------------------------------------------------
<1> Set `partial` to `true` to allow the snapshot to succeed even when not
all of the indices' primary shards are available. Defaults to `false`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-includeGlobalState]
--------------------------------------------------
<1> Set `includeGlobalState` to `false` to prevent writing the cluster's global
state as part of the snapshot. Defaults to `true`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-masterTimeout]
--------------------------------------------------
<1> Timeout to connect to the master node as a `TimeValue`.
<2> Timeout to connect to the master node as a `String`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-waitForCompletion]
--------------------------------------------------
<1> Waits for the snapshot to complete before a response is returned.

[[java-rest-high-snapshot-create-snapshot-sync]]
==== Synchronous Execution

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute]
--------------------------------------------------

[[java-rest-high-snapshot-create-snapshot-async]]
==== Asynchronous Execution

The asynchronous execution of a create snapshot request requires both the
`CreateSnapshotRequest` instance and an `ActionListener` instance to be
passed as arguments to the asynchronous method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute-async]
--------------------------------------------------
<1> The `CreateSnapshotRequest` to execute and the `ActionListener` to use when
the execution completes.

The asynchronous method does not block and returns immediately. Once the
request completes, the `ActionListener` is called back with the `onResponse`
method if the execution succeeded or with the `onFailure` method if it
failed.

A typical listener for `CreateSnapshotResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument.
<2> Called in case of a failure. The raised exception is provided as an
argument.

[[java-rest-high-snapshot-create-snapshot-response]]
==== Snapshot Create Response

Use the `CreateSnapshotResponse` to retrieve information about the evaluated
request:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-response]
--------------------------------------------------
<1> Indicates the node has started the request.
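The listener snippet itself is pulled in via `include-tagged` and is not
reproduced in this diff. As a rough sketch only (the exact tagged code lives
in `SnapshotClientDocumentationIT`), such a listener might look like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
ActionListener<CreateSnapshotResponse> listener =
    new ActionListener<CreateSnapshotResponse>() {
        @Override
        public void onResponse(CreateSnapshotResponse createSnapshotResponse) {
            // called when the create snapshot request completed successfully
        }

        @Override
        public void onFailure(Exception e) {
            // called when the create snapshot request failed
        }
    };
--------------------------------------------------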

@@ -0,0 +1,103 @@
[[java-rest-high-snapshot-get-snapshots]]
=== Get Snapshots API

Use the Get Snapshots API to retrieve snapshot information from a repository.

[[java-rest-high-snapshot-get-snapshots-request]]
==== Get Snapshots Request

A `GetSnapshotsRequest`:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request]
--------------------------------------------------

==== Required Arguments
The following arguments are mandatory:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-repositoryName]
--------------------------------------------------
<1> The name of the repository.

==== Optional Arguments
The following arguments are optional:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-snapshots]
--------------------------------------------------
<1> An array of snapshot names to retrieve. If not set, the request returns all
snapshots in the repository.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-masterTimeout]
--------------------------------------------------
<1> Timeout to connect to the master node as a `TimeValue`.
<2> Timeout to connect to the master node as a `String`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-verbose]
--------------------------------------------------
<1> `Boolean` indicating whether the response should be verbose.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-ignore-unavailable]
--------------------------------------------------
<1> `Boolean` indicating whether unavailable snapshots should be ignored. If
`false`, the request fails when any of the requested snapshots is unavailable.

[[java-rest-high-snapshot-get-snapshots-sync]]
==== Synchronous Execution

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-execute]
--------------------------------------------------

[[java-rest-high-snapshot-get-snapshots-async]]
==== Asynchronous Execution

The asynchronous execution of a get snapshots request requires both the
`GetSnapshotsRequest` instance and an `ActionListener` instance to be
passed as arguments to the asynchronous method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-execute-async]
--------------------------------------------------
<1> The `GetSnapshotsRequest` to execute and the `ActionListener` to use when
the execution completes.

The asynchronous method does not block and returns immediately. Once the
request completes, the `ActionListener` is called back with the `onResponse`
method if the execution succeeded or with the `onFailure` method if it
failed.

A typical listener for `GetSnapshotsResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument.
<2> Called in case of a failure. The raised exception is provided as an
argument.

[[java-rest-high-snapshot-get-snapshots-response]]
==== Get Snapshots Response

Use the `GetSnapshotsResponse` to retrieve information about the evaluated
request:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-response]
--------------------------------------------------
<1> Indicates the node has started the request.
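For orientation, a hedged end-to-end sketch. The setter names follow the
`GetSnapshotsRequest` class this PR adds; the `client.snapshot().get(...)`
entry point and `RequestOptions.DEFAULT` are assumptions here, not quotes from
the diff:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
GetSnapshotsRequest request = new GetSnapshotsRequest();
request.repository("my_repository");            // required
request.snapshots(new String[]{"snapshot_1"});  // optional: defaults to all snapshots
request.verbose(true);                          // optional
// assumed high-level client entry point:
GetSnapshotsResponse response = client.snapshot().get(request, RequestOptions.DEFAULT);
--------------------------------------------------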

@@ -32,16 +32,20 @@ The Java High Level REST Client supports the following Search APIs:
* <<java-rest-high-search-scroll>>
* <<java-rest-high-clear-scroll>>
* <<java-rest-high-search-template>>
* <<java-rest-high-multi-search-template>>
* <<java-rest-high-multi-search>>
* <<java-rest-high-field-caps>>
* <<java-rest-high-rank-eval>>
* <<java-rest-high-explain>>

include::search/search.asciidoc[]
include::search/scroll.asciidoc[]
include::search/multi-search.asciidoc[]
include::search/search-template.asciidoc[]
include::search/multi-search-template.asciidoc[]
include::search/field-caps.asciidoc[]
include::search/rank-eval.asciidoc[]
include::search/explain.asciidoc[]

== Miscellaneous APIs

@@ -140,12 +144,16 @@ The Java High Level REST Client supports the following Snapshot APIs:
* <<java-rest-high-snapshot-create-repository>>
* <<java-rest-high-snapshot-delete-repository>>
* <<java-rest-high-snapshot-verify-repository>>
* <<java-rest-high-snapshot-create-snapshot>>
* <<java-rest-high-snapshot-get-snapshots>>
* <<java-rest-high-snapshot-delete-snapshot>>

include::snapshot/get_repository.asciidoc[]
include::snapshot/create_repository.asciidoc[]
include::snapshot/delete_repository.asciidoc[]
include::snapshot/verify_repository.asciidoc[]
include::snapshot/create_snapshot.asciidoc[]
include::snapshot/get_snapshots.asciidoc[]
include::snapshot/delete_snapshot.asciidoc[]

== Tasks APIs

@@ -18,7 +18,7 @@ value. For example, if a value is greater than or equal to 95% of the observed
it is said to be at the 95th percentile rank.

Assume your data consists of website load times. You may have a service agreement that
95% of page loads complete within 15ms and 99% of page loads complete within 30ms.
95% of page loads complete within 500ms and 99% of page loads complete within 600ms.

Let's look at a range of percentiles representing load time:

@@ -428,7 +428,7 @@ The `username` and `password` parameters are optional, and when they are present
will connect to the remote Elasticsearch node using basic auth. Be sure to use `https` when
using basic auth or the password will be sent in plain text.

Remote hosts have to be explicitly whitelisted in elasticsearch.yaml using the
Remote hosts have to be explicitly whitelisted in elasticsearch.yml using the
`reindex.remote.whitelist` property. It can be set to a comma delimited list
of allowed remote `host` and `port` combinations (e.g.
`otherhost:9200, another:9200, 127.0.10.*:9200, localhost:*`). Scheme is

@@ -18,6 +18,9 @@ Below is an example of configuring a mapping with various range fields followed
--------------------------------------------------
PUT range_index
{
  "settings": {
    "number_of_shards": 2
  },
  "mappings": {
    "_doc": {
      "properties": {
@@ -33,7 +36,7 @@ PUT range_index
  }
}

PUT range_index/_doc/1
PUT range_index/_doc/1?refresh
{
  "expected_attendees" : { <2>
    "gte" : 10,
@@ -46,6 +49,7 @@ PUT range_index/_doc/1
}
--------------------------------------------------
//CONSOLE
// TESTSETUP

<1> `date_range` types accept the same field parameters defined by the <<date, `date`>> type.
<2> Example indexing a meeting with 10 to 20 attendees.
@@ -68,7 +72,6 @@ GET range_index/_search
}
--------------------------------------------------
// CONSOLE
// TEST[setup:range_index]

The result produced by the above query.

@@ -125,7 +128,6 @@ GET range_index/_search
}
--------------------------------------------------
// CONSOLE
// TEST[setup:range_index]

<1> Range queries work the same as described in <<query-dsl-range-query, range query>>.
<2> Range queries over range <<mapping-types, fields>> support a `relation` parameter which can be one of `WITHIN`, `CONTAINS`,
@@ -191,7 +193,6 @@ PUT range_index/_doc/2
}
--------------------------------------------------
// CONSOLE
// TEST[setup:range_index]

[[range-params]]
==== Parameters for range fields

@@ -184,8 +184,6 @@ GET /_search
--------------------------------------------------
// CONSOLE

minimum_should_match

A different
<<query-dsl-minimum-should-match,`minimum_should_match`>>
can be applied for low and high frequency terms with the additional

@@ -25,7 +25,7 @@ The queries in this group are:

The multi-field version of the `match` query.

<<query-dsl-common-terms-query,`common_terms` query>>::
<<query-dsl-common-terms-query,`common` terms query>>::

A more specialized query which gives more preference to uncommon words.

@@ -35,7 +35,7 @@ The queries in this group are:
allowing you to specify AND|OR|NOT conditions and multi-field search
within a single query string. For expert users only.

<<query-dsl-simple-query-string-query,`simple_query_string`>>::
<<query-dsl-simple-query-string-query,`simple_query_string` query>>::

A simpler, more robust version of the `query_string` syntax suitable
for exposing directly to users.
Binary file not shown.

@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.7-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-4.8.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionSha256Sum=203f4537da8b8075e38c036a6d14cb71b1149de5bf0a8f6db32ac2833a1d1294
distributionSha256Sum=ce1645ff129d11aad62dab70d63426fdce6cfd646fa309dc5dc5255dd03c7c11

@@ -23,13 +23,21 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.CompositeIndicesRequest;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import static org.elasticsearch.action.ValidateActions.addValidationError;

@@ -126,4 +134,39 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi
        }
        out.writeStreamableList(requests);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        MultiSearchTemplateRequest that = (MultiSearchTemplateRequest) o;
        return maxConcurrentSearchRequests == that.maxConcurrentSearchRequests &&
                Objects.equals(requests, that.requests) &&
                Objects.equals(indicesOptions, that.indicesOptions);
    }

    @Override
    public int hashCode() {
        return Objects.hash(maxConcurrentSearchRequests, requests, indicesOptions);
    }

    public static byte[] writeMultiLineFormat(MultiSearchTemplateRequest multiSearchTemplateRequest,
                                              XContent xContent) throws IOException {
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        for (SearchTemplateRequest templateRequest : multiSearchTemplateRequest.requests()) {
            final SearchRequest searchRequest = templateRequest.getRequest();
            // write the search request parameters as the header line
            try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) {
                MultiSearchRequest.writeSearchRequestParams(searchRequest, xContentBuilder);
                BytesReference.bytes(xContentBuilder).writeTo(output);
            }
            output.write(xContent.streamSeparator());
            // write the template itself as the body line
            try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) {
                templateRequest.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
                BytesReference.bytes(xContentBuilder).writeTo(output);
            }
            output.write(xContent.streamSeparator());
        }
        return output.toByteArray();
    }

}
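`writeMultiLineFormat` reproduces the `_msearch/template` newline-delimited
wire format: for each template request it writes a search-request header
object, a separator, then the template body object and another separator. A
usage sketch, using only constructors and setters that appear elsewhere in
this diff:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();
SearchTemplateRequest templateRequest = new SearchTemplateRequest(new SearchRequest("test"));
templateRequest.setScriptType(ScriptType.INLINE);
templateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}");
multiRequest.add(templateRequest);

// Two newline-terminated JSON objects per template request (header + body).
byte[] ndjson = MultiSearchTemplateRequest.writeMultiLineFormat(
        multiRequest, XContentType.JSON.xContent());
--------------------------------------------------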

@@ -22,6 +22,7 @@ package org.elasticsearch.script.mustache;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;

@@ -31,6 +32,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Arrays;
@@ -106,6 +108,13 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
        public Exception getFailure() {
            return exception;
        }

        @Override
        public String toString() {
            return "Item [response=" + response + ", exception=" + exception + "]";
        }

    }

    private Item[] items;

@@ -117,7 +126,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
    public MultiSearchTemplateResponse(Item[] items, long tookInMillis) {
        this.items = items;
        this.tookInMillis = tookInMillis;
    }
    }

    @Override
    public Iterator<Item> iterator() {
@@ -184,6 +193,23 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
    static final class Fields {
        static final String RESPONSES = "responses";
    }

    public static MultiSearchTemplateResponse fromXContext(XContentParser parser) {
        // The MultiSearchTemplateResponse is identical to the multi search response,
        // so we reuse the parsing logic in multi search response
        MultiSearchResponse mSearchResponse = MultiSearchResponse.fromXContext(parser);
        org.elasticsearch.action.search.MultiSearchResponse.Item[] responses = mSearchResponse.getResponses();
        Item[] templateResponses = new Item[responses.length];
        int i = 0;
        for (org.elasticsearch.action.search.MultiSearchResponse.Item item : responses) {
            SearchTemplateResponse stResponse = null;
            if (item.getResponse() != null) {
                stResponse = new SearchTemplateResponse();
                stResponse.setResponse(item.getResponse());
            }
            templateResponses[i++] = new Item(stResponse, item.getFailure());
        }
        return new MultiSearchTemplateResponse(templateResponses, mSearchResponse.getTook().millis());
    }

    @Override
    public String toString() {
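`fromXContext` lets callers parse an `_msearch/template` response body by
delegating to the multi search parsing logic. A hedged sketch of iterating the
parsed items, using only accessors that appear elsewhere in this diff
(`getResponses`, `isFailure`, `getFailureMessage`, `getResponse`); the
`parser` variable is assumed to be a suitably positioned `XContentParser`:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
MultiSearchTemplateResponse response = MultiSearchTemplateResponse.fromXContext(parser);
for (MultiSearchTemplateResponse.Item item : response.getResponses()) {
    if (item.isFailure()) {
        // per-item failure, e.g. log item.getFailureMessage()
    } else {
        SearchTemplateResponse templateResponse = item.getResponse();
        // inspect templateResponse.getResponse() / hasResponse()
    }
}
--------------------------------------------------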

@@ -66,6 +66,11 @@ public class SearchTemplateResponse extends ActionResponse implements StatusToXC

    public boolean hasResponse() {
        return response != null;
    }

    @Override
    public String toString() {
        return "SearchTemplateResponse [source=" + source + ", response=" + response + "]";
    }

    @Override
@@ -19,14 +19,22 @@

package org.elasticsearch.script.mustache;

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;
import org.elasticsearch.test.rest.FakeRestRequest;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
@@ -97,5 +105,57 @@ public class MultiSearchTemplateRequestTests extends ESTestCase {
        expectThrows(IllegalArgumentException.class, () ->
                request.maxConcurrentSearchRequests(randomIntBetween(Integer.MIN_VALUE, 0)));
    }

    public void testMultiSearchTemplateToJson() throws Exception {
        final int numSearchRequests = randomIntBetween(1, 10);
        MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
        for (int i = 0; i < numSearchRequests; i++) {
            // Create a random request.
            String[] indices = {"test"};
            SearchRequest searchRequest = new SearchRequest(indices);
            // scroll is not supported in the current msearch or msearchtemplate api, so unset it:
            searchRequest.scroll((Scroll) null);
            // batched reduce size is currently not set-able on a per-request basis as it is a query string parameter only
            searchRequest.setBatchedReduceSize(SearchRequest.DEFAULT_BATCHED_REDUCE_SIZE);
            SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(searchRequest);

            searchTemplateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}");
            searchTemplateRequest.setScriptType(ScriptType.INLINE);
            searchTemplateRequest.setProfile(randomBoolean());

            Map<String, Object> scriptParams = new HashMap<>();
            scriptParams.put("field", "name");
            scriptParams.put("value", randomAlphaOfLengthBetween(2, 5));
            searchTemplateRequest.setScriptParams(scriptParams);

            multiSearchTemplateRequest.add(searchTemplateRequest);
        }

        // Serialize the request
        String serialized = toJsonString(multiSearchTemplateRequest);

        // Deserialize the request
        RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry())
                .withContent(new BytesArray(serialized), XContentType.JSON).build();
        MultiSearchTemplateRequest deser = RestMultiSearchTemplateAction.parseRequest(restRequest, true);

        // For object equality purposes need to set the search requests' source to non-null
        for (SearchTemplateRequest str : deser.requests()) {
            SearchRequest sr = str.getRequest();
            if (sr.source() == null) {
                sr.source(new SearchSourceBuilder());
            }
        }
        // Compare the deserialized request object with the original request object
        assertEquals(multiSearchTemplateRequest, deser);

        // Finally, serialize the deserialized request to compare JSON equivalence (in case Object.equals() fails to reveal a discrepancy)
        assertEquals(serialized, toJsonString(deser));
    }

    protected String toJsonString(MultiSearchTemplateRequest multiSearchTemplateRequest) throws IOException {
        byte[] bytes = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, XContentType.JSON.xContent());
        return new String(bytes, StandardCharsets.UTF_8);
    }

}

@@ -0,0 +1,138 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.script.mustache;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.function.Predicate;
import java.util.function.Supplier;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

public class MultiSearchTemplateResponseTests extends AbstractXContentTestCase<MultiSearchTemplateResponse> {

    @Override
    protected MultiSearchTemplateResponse createTestInstance() {
        int numItems = randomIntBetween(0, 128);
        long overallTookInMillis = randomNonNegativeLong();
        MultiSearchTemplateResponse.Item[] items = new MultiSearchTemplateResponse.Item[numItems];
        for (int i = 0; i < numItems; i++) {
            // Creating a minimal response is OK, because SearchResponse itself
            // is tested elsewhere.
            long tookInMillis = randomNonNegativeLong();
            int totalShards = randomIntBetween(1, Integer.MAX_VALUE);
            int successfulShards = randomIntBetween(0, totalShards);
            int skippedShards = totalShards - successfulShards;
            InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty();
            SearchResponse.Clusters clusters = new SearchResponse.Clusters(totalShards, successfulShards, skippedShards);
            SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse();
            SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards,
                    successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters);
            searchTemplateResponse.setResponse(searchResponse);
            items[i] = new MultiSearchTemplateResponse.Item(searchTemplateResponse, null);
        }
        return new MultiSearchTemplateResponse(items, overallTookInMillis);
    }


    private static MultiSearchTemplateResponse createTestInstanceWithFailures() {
        int numItems = randomIntBetween(0, 128);
        long overallTookInMillis = randomNonNegativeLong();
        MultiSearchTemplateResponse.Item[] items = new MultiSearchTemplateResponse.Item[numItems];
        for (int i = 0; i < numItems; i++) {
            if (randomBoolean()) {
                // Creating a minimal response is OK, because SearchResponse itself
                // is tested elsewhere.
                long tookInMillis = randomNonNegativeLong();
                int totalShards = randomIntBetween(1, Integer.MAX_VALUE);
                int successfulShards = randomIntBetween(0, totalShards);
                int skippedShards = totalShards - successfulShards;
                InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty();
                SearchResponse.Clusters clusters = new SearchResponse.Clusters(totalShards, successfulShards, skippedShards);
                SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse();
                SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards,
                        successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters);
                searchTemplateResponse.setResponse(searchResponse);
                items[i] = new MultiSearchTemplateResponse.Item(searchTemplateResponse, null);
            } else {
                items[i] = new MultiSearchTemplateResponse.Item(null, new ElasticsearchException("an error"));
            }
        }
        return new MultiSearchTemplateResponse(items, overallTookInMillis);
    }

    @Override
    protected MultiSearchTemplateResponse doParseInstance(XContentParser parser) throws IOException {
        return MultiSearchTemplateResponse.fromXContext(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    protected Predicate<String> getRandomFieldsExcludeFilterWhenResultHasErrors() {
        return field -> field.startsWith("responses");
    }

    @Override
    protected void assertEqualInstances(MultiSearchTemplateResponse expectedInstance, MultiSearchTemplateResponse newInstance) {
        assertThat(newInstance.getTook(), equalTo(expectedInstance.getTook()));
        assertThat(newInstance.getResponses().length, equalTo(expectedInstance.getResponses().length));
        for (int i = 0; i < expectedInstance.getResponses().length; i++) {
            MultiSearchTemplateResponse.Item expectedItem = expectedInstance.getResponses()[i];
            MultiSearchTemplateResponse.Item actualItem = newInstance.getResponses()[i];
            if (expectedItem.isFailure()) {
                assertThat(actualItem.getResponse(), nullValue());
                assertThat(actualItem.getFailureMessage(), containsString(expectedItem.getFailureMessage()));
            } else {
                assertThat(actualItem.getResponse().toString(), equalTo(expectedItem.getResponse().toString()));
                assertThat(actualItem.getFailure(), nullValue());
            }
        }
    }

    /**
     * Test parsing {@link MultiSearchTemplateResponse} with inner failures as they don't support asserting on xcontent equivalence, given
     * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()}
     * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end.
     */
    public void testFromXContentWithFailures() throws IOException {
        Supplier<MultiSearchTemplateResponse> instanceSupplier = MultiSearchTemplateResponseTests::createTestInstanceWithFailures;
        //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata,
        //but that does not bother our assertions, as we only want to test that we don't break.
        boolean supportsUnknownFields = true;
        //exceptions are not of the same type whenever parsed back
        boolean assertToXContentEquivalence = false;
        AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY,
                getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance,
                this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS);
    }

}
@@ -100,14 +100,14 @@ public class AzureBlobContainer extends AbstractBlobContainer {
    public void deleteBlob(String blobName) throws IOException {
        logger.trace("deleteBlob({})", blobName);

        if (!blobExists(blobName)) {
            throw new NoSuchFileException("Blob [" + blobName + "] does not exist");
        }

        try {
            blobStore.deleteBlob(buildKey(blobName));
        } catch (URISyntaxException | StorageException e) {
            logger.warn("can not access [{}] in container {{}}: {}", blobName, blobStore, e.getMessage());
        } catch (StorageException e) {
            if (e.getHttpStatusCode() == HttpURLConnection.HTTP_NOT_FOUND) {
                throw new NoSuchFileException(e.getMessage());
            }
            throw new IOException(e);
        } catch (URISyntaxException e) {
            throw new IOException(e);
        }
    }

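The hunk above belongs to the "Do not check for Azure container existence"
change: instead of calling `blobExists` first, the new code attempts the
operation and translates a 404 `StorageException` into `NoSuchFileException`,
saving a round-trip and avoiding the race between check and delete. A minimal
restatement of that pattern, distilled from the added lines (imports elided):

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
// Act first, translate the error afterwards: no separate existence check.
try {
    blobStore.deleteBlob(buildKey(blobName));
} catch (StorageException e) {
    if (e.getHttpStatusCode() == HttpURLConnection.HTTP_NOT_FOUND) {
        throw new NoSuchFileException(e.getMessage());  // blob was already gone
    }
    throw new IOException(e);
} catch (URISyntaxException e) {
    throw new IOException(e);
}
--------------------------------------------------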
@@ -150,19 +150,17 @@ public class AzureStorageServiceImpl extends AbstractComponent implements AzureS
    public void deleteFiles(String account, String container, String path) throws URISyntaxException, StorageException {
        final Tuple<CloudBlobClient, Supplier<OperationContext>> client = client(account);
        // container name must be lower case.
        final CloudBlobContainer blobContainer = client.v1().getContainerReference(container);
        logger.trace(() -> new ParameterizedMessage("delete files container [{}], path [{}]", container, path));
        SocketAccess.doPrivilegedVoidException(() -> {
            if (blobContainer.exists()) {
                // list the blobs using a flat blob listing mode
                for (final ListBlobItem blobItem : blobContainer.listBlobs(path, true, EnumSet.noneOf(BlobListingDetails.class), null,
                        client.v2().get())) {
                    final String blobName = blobNameFromUri(blobItem.getUri());
                    logger.trace(() -> new ParameterizedMessage("removing blob [{}] full URI was [{}]", blobName, blobItem.getUri()));
                    // don't call {@code #deleteBlob}, use the same client
                    final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blobName);
                    azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get());
                }
            // list the blobs using a flat blob listing mode
            final CloudBlobContainer blobContainer = client.v1().getContainerReference(container);
            for (final ListBlobItem blobItem : blobContainer.listBlobs(path, true, EnumSet.noneOf(BlobListingDetails.class), null,
                    client.v2().get())) {
                final String blobName = blobNameFromUri(blobItem.getUri());
                logger.trace(() -> new ParameterizedMessage("removing blob [{}] full URI was [{}]", blobName, blobItem.getUri()));
                // don't call {@code #deleteBlob}, use the same client
                final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blobName);
                azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get());
            }
        });
    }
@@ -192,11 +190,8 @@ public class AzureStorageServiceImpl extends AbstractComponent implements AzureS
        final Tuple<CloudBlobClient, Supplier<OperationContext>> client = client(account);
        final CloudBlobContainer blobContainer = client.v1().getContainerReference(container);
        return SocketAccess.doPrivilegedException(() -> {
            if (blobContainer.exists(null, null, client.v2().get())) {
                final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob);
                return azureBlob.exists(null, null, client.v2().get());
            }
            return false;
            final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob);
            return azureBlob.exists(null, null, client.v2().get());
        });
    }

@@ -207,11 +202,9 @@ public class AzureStorageServiceImpl extends AbstractComponent implements AzureS
        final CloudBlobContainer blobContainer = client.v1().getContainerReference(container);
        logger.trace(() -> new ParameterizedMessage("delete blob for container [{}], blob [{}]", container, blob));
        SocketAccess.doPrivilegedVoidException(() -> {
            if (blobContainer.exists(null, null, client.v2().get())) {
                final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob);
                logger.trace(() -> new ParameterizedMessage("container [{}]: blob [{}] found. removing.", container, blob));
                azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get());
            }
            final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob);
            logger.trace(() -> new ParameterizedMessage("container [{}]: blob [{}] found. removing.", container, blob));
            azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get());
        });
    }

@@ -238,19 +231,17 @@ public class AzureStorageServiceImpl extends AbstractComponent implements AzureS
        final CloudBlobContainer blobContainer = client.v1().getContainerReference(container);
        logger.trace(() -> new ParameterizedMessage("listing container [{}], keyPath [{}], prefix [{}]", container, keyPath, prefix));
        SocketAccess.doPrivilegedVoidException(() -> {
            if (blobContainer.exists()) {
                for (final ListBlobItem blobItem : blobContainer.listBlobs(keyPath + (prefix == null ? "" : prefix), false,
                        enumBlobListingDetails, null, client.v2().get())) {
                    final URI uri = blobItem.getUri();
                    logger.trace(() -> new ParameterizedMessage("blob url [{}]", uri));
                    // uri.getPath is of the form /container/keyPath.* and we want to strip off the /container/
                    // this requires 1 + container.length() + 1, with each 1 corresponding to one of the /
                    final String blobPath = uri.getPath().substring(1 + container.length() + 1);
                    final BlobProperties properties = ((CloudBlockBlob) blobItem).getProperties();
                    final String name = blobPath.substring(keyPath.length());
                    logger.trace(() -> new ParameterizedMessage("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength()));
                    blobsBuilder.put(name, new PlainBlobMetaData(name, properties.getLength()));
                }
            for (final ListBlobItem blobItem : blobContainer.listBlobs(keyPath + (prefix == null ? "" : prefix), false,
                    enumBlobListingDetails, null, client.v2().get())) {
                final URI uri = blobItem.getUri();
                logger.trace(() -> new ParameterizedMessage("blob url [{}]", uri));
                // uri.getPath is of the form /container/keyPath.* and we want to strip off the /container/
                // this requires 1 + container.length() + 1, with each 1 corresponding to one of the /
                final String blobPath = uri.getPath().substring(1 + container.length() + 1);
                final BlobProperties properties = ((CloudBlockBlob) blobItem).getProperties();
                final String name = blobPath.substring(keyPath.length());
                logger.trace(() -> new ParameterizedMessage("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength()));
                blobsBuilder.put(name, new PlainBlobMetaData(name, properties.getLength()));
            }
        });
        return blobsBuilder.immutableMap();
@@ -264,8 +255,8 @@ public class AzureStorageServiceImpl extends AbstractComponent implements AzureS
        final CloudBlobContainer blobContainer = client.v1().getContainerReference(container);
        final CloudBlockBlob blob = blobContainer.getBlockBlobReference(blobName);
        try {
            SocketAccess.doPrivilegedVoidException(() -> blob.upload(inputStream, blobSize, AccessCondition.generateIfNotExistsCondition(),
                    null, client.v2().get()));
            SocketAccess.doPrivilegedVoidException(() ->
                    blob.upload(inputStream, blobSize, AccessCondition.generateIfNotExistsCondition(), null, client.v2().get()));
        } catch (final StorageException se) {
            if (se.getHttpStatusCode() == HttpURLConnection.HTTP_CONFLICT &&
                    StorageErrorCodeStrings.BLOB_ALREADY_EXISTS.equals(se.getErrorCode())) {

@@ -22,7 +22,6 @@ package org.elasticsearch.repositories.azure;
import com.microsoft.azure.storage.OperationContext;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.blob.CloudBlobClient;

import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.collect.MapBuilder;
@@ -72,9 +71,11 @@ public class AzureStorageServiceMock extends AbstractComponent implements AzureS
    }

    @Override
    public void deleteFiles(String account, String container, String path) {
    public void deleteFiles(String account, String container, String path) throws URISyntaxException, StorageException {
        final Map<String, BlobMetaData> blobs = listBlobsByPrefix(account, container, path, null);
        blobs.keySet().forEach(key -> deleteBlob(account, container, key));
        for (String key : blobs.keySet()) {
            deleteBlob(account, container, key);
        }
    }

    @Override
@@ -83,8 +84,10 @@ public class AzureStorageServiceMock extends AbstractComponent implements AzureS
    }

    @Override
    public void deleteBlob(String account, String container, String blob) {
        blobs.remove(blob);
    public void deleteBlob(String account, String container, String blob) throws URISyntaxException, StorageException {
        if (blobs.remove(blob) == null) {
            throw new StorageException("BlobNotFound", "[" + blob + "] does not exist.", 404, null, null);
        }
    }

    @Override

@@ -25,7 +25,6 @@ import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.model.StorageClass;

import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
@@ -40,7 +39,7 @@ import java.util.Locale;

class S3BlobStore extends AbstractComponent implements BlobStore {

    private final AwsS3Service service;
    private final S3Service service;

    private final String clientName;

@@ -54,7 +53,7 @@ class S3BlobStore extends AbstractComponent implements BlobStore {

    private final StorageClass storageClass;

    S3BlobStore(Settings settings, AwsS3Service service, String clientName, String bucket, boolean serverSideEncryption,
    S3BlobStore(Settings settings, S3Service service, String clientName, String bucket, boolean serverSideEncryption,
            ByteSizeValue bufferSize, String cannedACL, String storageClass) {
        super(settings);
        this.service = service;

@@ -20,7 +20,6 @@
package org.elasticsearch.repositories.s3;

import com.amazonaws.auth.BasicAWSCredentials;

import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobPath;
@@ -156,8 +155,10 @@ class S3Repository extends BlobStoreRepository {
    /**
     * Constructs an s3 backed repository
     */
    S3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry,
                 AwsS3Service awsService) throws IOException {
    S3Repository(final RepositoryMetaData metadata,
                 final Settings settings,
                 final NamedXContentRegistry namedXContentRegistry,
                 final S3Service service) throws IOException {
        super(metadata, settings, namedXContentRegistry);

        final String bucket = BUCKET_SETTING.get(metadata.settings());
@@ -188,9 +189,9 @@ class S3Repository extends BlobStoreRepository {
        // deprecated behavior: override client credentials from the cluster state
        // (repository settings)
        if (S3ClientSettings.checkDeprecatedCredentials(metadata.settings())) {
            overrideCredentialsFromClusterState(awsService);
            overrideCredentialsFromClusterState(service);
        }
        blobStore = new S3BlobStore(settings, awsService, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass);
        blobStore = new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass);

        final String basePath = BASE_PATH_SETTING.get(metadata.settings());
        if (Strings.hasLength(basePath)) {
@@ -220,13 +221,13 @@ class S3Repository extends BlobStoreRepository {
        return chunkSize;
    }

    void overrideCredentialsFromClusterState(AwsS3Service awsService) {
    void overrideCredentialsFromClusterState(final S3Service s3Service) {
        deprecationLogger.deprecated("Using s3 access/secret key from repository settings. Instead "
                + "store these in named clients and the elasticsearch keystore for secure settings.");
        final BasicAWSCredentials insecureCredentials = S3ClientSettings.loadDeprecatedCredentials(metadata.settings());
        // hack, but that's ok because the whole if branch should be axed
        final Map<String, S3ClientSettings> prevSettings = awsService.refreshAndClearCache(S3ClientSettings.load(Settings.EMPTY));
        final Map<String, S3ClientSettings> prevSettings = s3Service.refreshAndClearCache(S3ClientSettings.load(Settings.EMPTY));
        final Map<String, S3ClientSettings> newSettings = S3ClientSettings.overrideCredentials(prevSettings, insecureCredentials);
        awsService.refreshAndClearCache(newSettings);
        s3Service.refreshAndClearCache(newSettings);
    }
}

@@ -19,14 +19,6 @@

package org.elasticsearch.repositories.s3;

import java.io.IOException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import com.amazonaws.util.json.Jackson;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
@@ -39,6 +31,15 @@ import org.elasticsearch.plugins.ReloadablePlugin;
import org.elasticsearch.plugins.RepositoryPlugin;
import org.elasticsearch.repositories.Repository;

import java.io.IOException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * A plugin to add a repository type that writes to and from the AWS S3.
 */
@@ -60,33 +61,29 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, Relo
        });
    }

    private final AwsS3Service awsS3Service;
    private final S3Service service;

    public S3RepositoryPlugin(Settings settings) {
        this.awsS3Service = getAwsS3Service(settings);
    public S3RepositoryPlugin(final Settings settings) {
        this(settings, new S3Service(settings));
    }

    S3RepositoryPlugin(final Settings settings, final S3Service service) {
        this.service = Objects.requireNonNull(service, "S3 service must not be null");
        // eagerly load client settings so that secure settings are read
        final Map<String, S3ClientSettings> clientsSettings = S3ClientSettings.load(settings);
        this.awsS3Service.refreshAndClearCache(clientsSettings);
    }

    protected S3RepositoryPlugin(AwsS3Service awsS3Service) {
        this.awsS3Service = awsS3Service;
        this.service.refreshAndClearCache(clientsSettings);
    }

    // proxy method for testing
    protected S3Repository getS3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry)
            throws IOException {
        return new S3Repository(metadata, settings, namedXContentRegistry, awsS3Service);
    }

    // proxy method for testing
    protected AwsS3Service getAwsS3Service(Settings settings) {
        return new InternalAwsS3Service(settings);
    protected S3Repository createRepository(final RepositoryMetaData metadata,
                                            final Settings settings,
                                            final NamedXContentRegistry registry) throws IOException {
        return new S3Repository(metadata, settings, registry, service);
    }

    @Override
    public Map<String, Repository.Factory> getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) {
        return Collections.singletonMap(S3Repository.TYPE, (metadata) -> getS3Repository(metadata, env.settings(), namedXContentRegistry));
    public Map<String, Repository.Factory> getRepositories(final Environment env, final NamedXContentRegistry registry) {
        return Collections.singletonMap(S3Repository.TYPE, (metadata) -> createRepository(metadata, env.settings(), registry));
    }

    @Override
@@ -112,11 +109,11 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, Relo
    public void reload(Settings settings) {
        // secure settings should be readable
        final Map<String, S3ClientSettings> clientsSettings = S3ClientSettings.load(settings);
        awsS3Service.refreshAndClearCache(clientsSettings);
        service.refreshAndClearCache(clientsSettings);
    }

    @Override
    public void close() throws IOException {
        awsS3Service.close();
        service.close();
    }
}

@@ -28,24 +28,25 @@ import com.amazonaws.http.IdleConnectionReaper;
import com.amazonaws.internal.StaticCredentialsProvider;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;

import java.io.Closeable;
import java.io.IOException;
import java.util.Map;

import static java.util.Collections.emptyMap;


class InternalAwsS3Service extends AbstractComponent implements AwsS3Service {
class S3Service extends AbstractComponent implements Closeable {

    private volatile Map<String, AmazonS3Reference> clientsCache = emptyMap();
    private volatile Map<String, S3ClientSettings> clientsSettings = emptyMap();

    InternalAwsS3Service(Settings settings) {
    S3Service(Settings settings) {
        super(settings);
    }

@@ -55,7 +56,6 @@ class InternalAwsS3Service extends AbstractComponent implements AwsS3Service {
     * clients are usable until released. On release they will be destroyed instead
     * of being returned to the cache.
     */
    @Override
    public synchronized Map<String, S3ClientSettings> refreshAndClearCache(Map<String, S3ClientSettings> clientsSettings) {
        // shutdown all unused clients
        // others will shutdown on their respective release
@@ -71,7 +71,6 @@ class InternalAwsS3Service extends AbstractComponent implements AwsS3Service {
     * Attempts to retrieve a client by name from the cache. If the client does not
     * exist it will be created.
     */
    @Override
    public AmazonS3Reference client(String clientName) {
        AmazonS3Reference clientReference = clientsCache.get(clientName);
        if ((clientReference != null) && clientReference.tryIncRef()) {
@@ -65,82 +65,6 @@ public abstract class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase
        cleanRepositoryFiles(basePath);
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch-cloud-aws/issues/211")
    public void testSimpleWorkflow() {
        Client client = client();
        Settings.Builder settings = Settings.builder()
                .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(1000, 10000));

        // We sometime test getting the base_path from node settings using repositories.s3.base_path
        settings.put(S3Repository.BASE_PATH_SETTING.getKey(), basePath);

        logger.info("--> creating s3 repository with bucket[{}] and path [{}]", internalCluster().getInstance(Settings.class).get("repositories.s3.bucket"), basePath);
        PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
                .setType("s3").setSettings(settings
                ).get();
        assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));

        createIndex("test-idx-1", "test-idx-2", "test-idx-3");
        ensureGreen();

        logger.info("--> indexing some data");
        for (int i = 0; i < 100; i++) {
            index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i);
            index("test-idx-2", "doc", Integer.toString(i), "foo", "baz" + i);
            index("test-idx-3", "doc", Integer.toString(i), "foo", "baz" + i);
        }
        refresh();
        assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), equalTo(100L));
        assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().getTotalHits(), equalTo(100L));
        assertThat(client.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), equalTo(100L));

        logger.info("--> snapshot");
        CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-3").get();
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));

        assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));

        logger.info("--> delete some data");
        for (int i = 0; i < 50; i++) {
            client.prepareDelete("test-idx-1", "doc", Integer.toString(i)).get();
        }
        for (int i = 50; i < 100; i++) {
            client.prepareDelete("test-idx-2", "doc", Integer.toString(i)).get();
        }
        for (int i = 0; i < 100; i += 2) {
            client.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get();
        }
        refresh();
        assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), equalTo(50L));
        assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().getTotalHits(), equalTo(50L));
        assertThat(client.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), equalTo(50L));

        logger.info("--> close indices");
        client.admin().indices().prepareClose("test-idx-1", "test-idx-2").get();

        logger.info("--> restore all indices from the snapshot");
        RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));

        ensureGreen();
        assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), equalTo(100L));
        assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().getTotalHits(), equalTo(100L));
        assertThat(client.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), equalTo(50L));

        // Test restore after index deletion
        logger.info("--> delete indices");
        cluster().wipeIndices("test-idx-1", "test-idx-2");
        logger.info("--> restore one index after deletion");
        restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-2").execute().actionGet();
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
        ensureGreen();
        assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), equalTo(100L));
        ClusterState clusterState = client.admin().cluster().prepareState().get().getState();
        assertThat(clusterState.getMetaData().hasIndex("test-idx-1"), equalTo(true));
        assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false));
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch-cloud-aws/issues/211")
    public void testEncryption() {
        Client client = client();
@@ -179,7 +103,7 @@ public abstract class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase

        Settings settings = internalCluster().getInstance(Settings.class);
        Settings bucket = settings.getByPrefix("repositories.s3.");
        try (AmazonS3Reference s3Client = internalCluster().getInstance(AwsS3Service.class).client("default")) {
        try (AmazonS3Reference s3Client = internalCluster().getInstance(S3Service.class).client("default")) {
            String bucketName = bucket.get("bucket");
            logger.info("--> verify encryption for bucket [{}], prefix [{}]", bucketName, basePath);
            List<S3ObjectSummary> summaries = s3Client.client().listObjects(bucketName, basePath).getObjectSummaries();
@@ -442,7 +366,7 @@ public abstract class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase
        // We check that settings has been set in elasticsearch.yml integration test file
        // as described in README
        assertThat("Your settings in elasticsearch.yml are incorrect. Check README file.", bucketName, notNullValue());
        try (AmazonS3Reference s3Client = internalCluster().getInstance(AwsS3Service.class).client("default")) {
        try (AmazonS3Reference s3Client = internalCluster().getInstance(S3Service.class).client("default")) {
            ObjectListing prevListing = null;
            //From http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html
            //we can do at most 1K objects per delete

@@ -40,8 +40,8 @@ public class AwsS3ServiceImplTests extends ESTestCase {
    public void testAWSCredentialsDefaultToInstanceProviders() {
        final String inexistentClientName = randomAlphaOfLength(8).toLowerCase(Locale.ROOT);
        final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(Settings.EMPTY, inexistentClientName);
        final AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, clientSettings);
        assertThat(credentialsProvider, instanceOf(InternalAwsS3Service.PrivilegedInstanceProfileCredentialsProvider.class));
        final AWSCredentialsProvider credentialsProvider = S3Service.buildCredentials(logger, clientSettings);
        assertThat(credentialsProvider, instanceOf(S3Service.PrivilegedInstanceProfileCredentialsProvider.class));
    }

    public void testAWSCredentialsFromKeystore() {
@ -60,15 +60,15 @@ public class AwsS3ServiceImplTests extends ESTestCase {
|
|||
for (int i = 0; i < clientsCount; i++) {
|
||||
final String clientName = clientNamePrefix + i;
|
||||
final S3ClientSettings someClientSettings = allClientsSettings.get(clientName);
|
||||
final AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, someClientSettings);
|
||||
final AWSCredentialsProvider credentialsProvider = S3Service.buildCredentials(logger, someClientSettings);
|
||||
assertThat(credentialsProvider, instanceOf(StaticCredentialsProvider.class));
|
||||
assertThat(credentialsProvider.getCredentials().getAWSAccessKeyId(), is(clientName + "_aws_access_key"));
|
||||
assertThat(credentialsProvider.getCredentials().getAWSSecretKey(), is(clientName + "_aws_secret_key"));
|
||||
}
|
||||
// test default exists and is an Instance provider
|
||||
final S3ClientSettings defaultClientSettings = allClientsSettings.get("default");
|
||||
final AWSCredentialsProvider defaultCredentialsProvider = InternalAwsS3Service.buildCredentials(logger, defaultClientSettings);
|
||||
assertThat(defaultCredentialsProvider, instanceOf(InternalAwsS3Service.PrivilegedInstanceProfileCredentialsProvider.class));
|
||||
final AWSCredentialsProvider defaultCredentialsProvider = S3Service.buildCredentials(logger, defaultClientSettings);
|
||||
assertThat(defaultCredentialsProvider, instanceOf(S3Service.PrivilegedInstanceProfileCredentialsProvider.class));
|
||||
}
|
||||
|
||||
public void testSetDefaultCredential() {
|
||||
|
@ -82,7 +82,7 @@ public class AwsS3ServiceImplTests extends ESTestCase {
|
|||
assertThat(allClientsSettings.size(), is(1));
|
||||
// test default exists and is an Instance provider
|
||||
final S3ClientSettings defaultClientSettings = allClientsSettings.get("default");
|
||||
final AWSCredentialsProvider defaultCredentialsProvider = InternalAwsS3Service.buildCredentials(logger, defaultClientSettings);
|
||||
final AWSCredentialsProvider defaultCredentialsProvider = S3Service.buildCredentials(logger, defaultClientSettings);
|
||||
assertThat(defaultCredentialsProvider, instanceOf(StaticCredentialsProvider.class));
|
||||
assertThat(defaultCredentialsProvider.getCredentials().getAWSAccessKeyId(), is(awsAccessKey));
|
||||
assertThat(defaultCredentialsProvider.getCredentials().getAWSSecretKey(), is(awsSecretKey));
|
||||
|
@ -152,7 +152,7 @@ public class AwsS3ServiceImplTests extends ESTestCase {
|
|||
int expectedReadTimeout) {
|
||||
|
||||
final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default");
|
||||
final ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings);
|
||||
final ClientConfiguration configuration = S3Service.buildConfiguration(clientSettings);
|
||||
|
||||
assertThat(configuration.getResponseMetadataCacheSize(), is(0));
|
||||
assertThat(configuration.getProtocol(), is(expectedProtocol));
|
||||
|
|
|
@ -63,9 +63,9 @@ public class RepositoryCredentialsTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
static final class ProxyInternalAwsS3Service extends InternalAwsS3Service {
|
||||
static final class ProxyS3Service extends S3Service {
|
||||
|
||||
ProxyInternalAwsS3Service(Settings settings) {
|
||||
ProxyS3Service(Settings settings) {
|
||||
super(settings);
|
||||
}
|
||||
|
||||
|
@ -77,15 +77,9 @@ public class RepositoryCredentialsTests extends ESTestCase {
|
|||
|
||||
}
|
||||
|
||||
protected ProxyS3RepositoryPlugin(Settings settings) {
|
||||
super(settings);
|
||||
ProxyS3RepositoryPlugin(Settings settings) {
|
||||
super(settings, new ProxyS3Service(settings));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AwsS3Service getAwsS3Service(Settings settings) {
|
||||
return new ProxyInternalAwsS3Service(settings);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void testRepositoryCredentialsOverrideSecureCredentials() throws IOException {
|
||||
|
@ -108,7 +102,7 @@ public class RepositoryCredentialsTests extends ESTestCase {
|
|||
.put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key")
|
||||
.put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret").build());
|
||||
try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings);
|
||||
S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY);
|
||||
S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY);
|
||||
AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) {
|
||||
final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials();
|
||||
assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key"));
|
||||
|
@ -131,7 +125,7 @@ public class RepositoryCredentialsTests extends ESTestCase {
|
|||
.put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret")
|
||||
.build());
|
||||
try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(Settings.EMPTY);
|
||||
S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY);
|
||||
S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY);
|
||||
AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) {
|
||||
final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials();
|
||||
assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key"));
|
||||
|
@ -162,7 +156,7 @@ public class RepositoryCredentialsTests extends ESTestCase {
|
|||
}
|
||||
final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", builder.build());
|
||||
try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings);
|
||||
S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY)) {
|
||||
S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY)) {
|
||||
try (AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) {
|
||||
final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials
|
||||
.getCredentials();
|
||||
|
|
|
@ -110,14 +110,14 @@ public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCa
|
|||
@Override
|
||||
public Map<String, Repository.Factory> getRepositories(final Environment env, final NamedXContentRegistry registry) {
|
||||
return Collections.singletonMap(S3Repository.TYPE,
|
||||
(metadata) -> new S3Repository(metadata, env.settings(), registry, new InternalAwsS3Service(env.settings()) {
|
||||
(metadata) -> new S3Repository(metadata, env.settings(), registry, new S3Service(env.settings()) {
|
||||
@Override
|
||||
public synchronized AmazonS3Reference client(String clientName) {
|
||||
return new AmazonS3Reference(new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass));
|
||||
}
|
||||
}) {
|
||||
@Override
|
||||
void overrideCredentialsFromClusterState(AwsS3Service awsService) {
|
||||
void overrideCredentialsFromClusterState(S3Service awsService) {
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
@ -117,7 +117,7 @@ public class S3BlobStoreTests extends ESBlobStoreTestCase {
|
|||
|
||||
final String theClientName = randomAlphaOfLength(4);
|
||||
final AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass);
|
||||
final AwsS3Service service = new InternalAwsS3Service(Settings.EMPTY) {
|
||||
final S3Service service = new S3Service(Settings.EMPTY) {
|
||||
@Override
|
||||
public synchronized AmazonS3Reference client(String clientName) {
|
||||
assert theClientName.equals(clientName);
|
||||
|
|
|
@ -20,9 +20,7 @@
|
|||
package org.elasticsearch.repositories.s3;
|
||||
|
||||
import com.amazonaws.services.s3.AbstractAmazonS3;
|
||||
|
||||
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
|
||||
import org.elasticsearch.common.component.AbstractLifecycleComponent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.ByteSizeUnit;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
|
@ -30,6 +28,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
|||
import org.elasticsearch.repositories.RepositoryException;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.hamcrest.Matchers;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
@ -51,16 +50,11 @@ public class S3RepositoryTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
private static class DummyS3Service extends AbstractLifecycleComponent implements AwsS3Service {
|
||||
private static class DummyS3Service extends S3Service {
|
||||
DummyS3Service() {
|
||||
super(Settings.EMPTY);
|
||||
}
|
||||
@Override
|
||||
protected void doStart() {}
|
||||
@Override
|
||||
protected void doStop() {}
|
||||
@Override
|
||||
protected void doClose() {}
|
||||
|
||||
@Override
|
||||
public AmazonS3Reference client(String clientName) {
|
||||
return new AmazonS3Reference(new DummyS3Client());
|
||||
|
|
|
@ -24,10 +24,10 @@ import java.util.IdentityHashMap;
|
|||
import com.amazonaws.services.s3.AmazonS3;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
public class TestAwsS3Service extends InternalAwsS3Service {
|
||||
public class TestAwsS3Service extends S3Service {
|
||||
public static class TestPlugin extends S3RepositoryPlugin {
|
||||
public TestPlugin(Settings settings) {
|
||||
super(new TestAwsS3Service(settings));
|
||||
super(settings, new TestAwsS3Service(settings));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
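Note: with AwsS3Service and InternalAwsS3Service merged into the single concrete S3Service class, repository plugins inject the service through the S3RepositoryPlugin constructor instead of overriding a factory method. A minimal sketch of the resulting test pattern, assuming only the constructors shown in the hunks above (MyTestPlugin is a hypothetical name):

    // hypothetical plugin subclass wiring a custom S3Service into the repository
    public class MyTestPlugin extends S3RepositoryPlugin {
        public MyTestPlugin(Settings settings) {
            // the second argument replaces the former getAwsS3Service(Settings) override
            super(settings, new S3Service(settings));
        }
    }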
@@ -17,7 +17,7 @@
 * under the License.
 */

package org.elasticsearch.bwc;
package org.elasticsearch.upgrades;

import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;

@@ -153,6 +153,21 @@ public class QueryBuilderBWCIT extends ESRestTestCase {
        return true;
    }

    @Override
    protected boolean preserveSnapshotsUponCompletion() {
        return true;
    }

    @Override
    protected boolean preserveReposUponCompletion() {
        return true;
    }

    @Override
    protected boolean preserveTemplatesUponCompletion() {
        return true;
    }

    public void testQueryBuilderBWC() throws Exception {
        String index = "queries";
        if (runningAgainstOldCluster) {

@@ -1,89 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties

apply plugin: 'elasticsearch.standalone-test'

// This is a top level task which we will add dependencies to below.
// It is a single task that can be used to backcompat tests against all versions.
task bwcTest {
    description = 'Runs backwards compatibility tests.'
    group = 'verification'
}

for (Version version : bwcVersions.indexCompatible) {
    String baseName = "v${version}"

    Task oldQueryBuilderTest = tasks.create(name: "${baseName}#oldQueryBuilderTest", type: RestIntegTestTask) {
        mustRunAfter(precommit)
    }
    tasks.getByName("${baseName}#oldQueryBuilderTestRunner").configure {
        systemProperty 'tests.is_old_cluster', 'true'
        systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
    }

    configure(extensions.findByName("${baseName}#oldQueryBuilderTestCluster")) {
        bwcVersion = version
        numBwcNodes = 1
        numNodes = 1
        clusterName = 'query_builder_bwc'
        setting 'http.content_type.required', 'true'
    }

    Task upgradedQueryBuilderTest = tasks.create(name: "${baseName}#upgradedQueryBuilderTest", type: RestIntegTestTask) {
        dependsOn(oldQueryBuilderTest, "${baseName}#oldQueryBuilderTestCluster#stop")
    }

    configure(extensions.findByName("${baseName}#upgradedQueryBuilderTestCluster")) {
        dependsOn oldQueryBuilderTest,
                "${baseName}#oldQueryBuilderTestCluster#stop"
        clusterName = 'query_builder_bwc'
        numNodes = 1
        dataDir = { nodeNum -> oldQueryBuilderTest.nodes[nodeNum].dataDir }
        cleanShared = false // We want to keep snapshots made by the old cluster!
    }

    tasks.getByName("${baseName}#upgradedQueryBuilderTestRunner").configure {
        systemProperty 'tests.is_old_cluster', 'false'
        systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
    }

    Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") {
        dependsOn = [upgradedQueryBuilderTest]
    }

    if (project.bwc_tests_enabled) {
        bwcTest.dependsOn(versionBwcTest)
    }
}

test.enabled = false // no unit tests for rolling upgrades, only the rest integration test

// basic integ tests includes testing bwc against the most recent version
task integTest {
    if (project.bwc_tests_enabled) {
        final def version = bwcVersions.snapshotsIndexCompatible.first()
        dependsOn "v${version}#bwcTest"
    }
}

check.dependsOn(integTest)
@@ -178,8 +178,7 @@ setup() {
}

# Ensures that if $MAX_MAP_COUNT is greater than the set vaule on the OS
# we do not attempt to update it this should cover equality as well as I think
# we can trust that equality operators work as intended.
# we do not attempt to update it.
@test "[INIT.D] sysctl is not run when it already has a larger or equal value set" {
    # intentionally set to the default +1
    sysctl -q -w vm.max_map_count=262145

@@ -332,7 +332,7 @@ if (isEclipse == false || project.path == ":server-tests") {
            dependsOn: test.dependsOn) {
        configure(BuildPlugin.commonTestConfig(project))
        classpath = project.test.classpath
        testClassesDir = project.test.testClassesDir
        testClassesDirs = project.test.testClassesDirs
        include '**/*IT.class'
    }
    check.dependsOn integTest
@@ -28,14 +28,17 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.Strings.EMPTY_ARRAY;

@@ -58,7 +61,8 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBo
 * <li>must not contain invalid file name characters {@link org.elasticsearch.common.Strings#INVALID_FILENAME_CHARS} </li>
 * </ul>
 */
public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotRequest> implements IndicesRequest.Replaceable {
public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotRequest>
        implements IndicesRequest.Replaceable, ToXContentObject {

    private String snapshot;

@@ -407,6 +411,34 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
        return this;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field("repository", repository);
        builder.field("snapshot", snapshot);
        builder.startArray("indices");
        for (String index : indices) {
            builder.value(index);
        }
        builder.endArray();
        builder.field("partial", partial);
        if (settings != null) {
            builder.startObject("settings");
            if (settings.isEmpty() == false) {
                settings.toXContent(builder, params);
            }
            builder.endObject();
        }
        builder.field("include_global_state", includeGlobalState);
        if (indicesOptions != null) {
            indicesOptions.toXContent(builder, params);
        }
        builder.field("wait_for_completion", waitForCompletion);
        builder.field("master_node_timeout", masterNodeTimeout.toString());
        builder.endObject();
        return builder;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");

@@ -416,4 +448,42 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
    public String getDescription() {
        return "snapshot [" + repository + ":" + snapshot + "]";
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        CreateSnapshotRequest that = (CreateSnapshotRequest) o;
        return partial == that.partial &&
            includeGlobalState == that.includeGlobalState &&
            waitForCompletion == that.waitForCompletion &&
            Objects.equals(snapshot, that.snapshot) &&
            Objects.equals(repository, that.repository) &&
            Arrays.equals(indices, that.indices) &&
            Objects.equals(indicesOptions, that.indicesOptions) &&
            Objects.equals(settings, that.settings) &&
            Objects.equals(masterNodeTimeout, that.masterNodeTimeout);
    }

    @Override
    public int hashCode() {
        int result = Objects.hash(snapshot, repository, indicesOptions, partial, settings, includeGlobalState, waitForCompletion);
        result = 31 * result + Arrays.hashCode(indices);
        return result;
    }

    @Override
    public String toString() {
        return "CreateSnapshotRequest{" +
            "snapshot='" + snapshot + '\'' +
            ", repository='" + repository + '\'' +
            ", indices=" + (indices == null ? null : Arrays.asList(indices)) +
            ", indicesOptions=" + indicesOptions +
            ", partial=" + partial +
            ", settings=" + settings +
            ", includeGlobalState=" + includeGlobalState +
            ", waitForCompletion=" + waitForCompletion +
            ", masterNodeTimeout=" + masterNodeTimeout +
            '}';
    }
}
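Note: the toXContent added above is what lets the high-level REST client send a create-snapshot request as a JSON body. A rough sketch of the output, assuming the imports from the first hunk of this file plus ToXContent; the repository and snapshot names are made up:

    // hypothetical round trip of CreateSnapshotRequest.toXContent
    CreateSnapshotRequest request = new CreateSnapshotRequest("my_repo", "snap_1").waitForCompletion(true);
    XContentBuilder builder = XContentFactory.jsonBuilder();
    request.toXContent(builder, ToXContent.EMPTY_PARAMS);
    // Strings.toString(builder) then yields roughly:
    // {"repository":"my_repo","snapshot":"snap_1","indices":[],"partial":false,"settings":{},
    //  "include_global_state":true,"expand_wildcards":["open"],"ignore_unavailable":false,
    //  "allow_no_indices":true,"wait_for_completion":true,"master_node_timeout":"30s"}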
@@ -25,10 +25,13 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.snapshots.SnapshotInfo;

import java.io.IOException;
import java.util.Objects;

/**
 * Create snapshot response

@@ -45,6 +48,10 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
    CreateSnapshotResponse() {
    }

    void setSnapshotInfo(SnapshotInfo snapshotInfo) {
        this.snapshotInfo = snapshotInfo;
    }

    /**
     * Returns snapshot information if snapshot was completed by the time this method returned or null otherwise.
     *

@@ -93,4 +100,58 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
        builder.endObject();
        return builder;
    }

    public static CreateSnapshotResponse fromXContent(XContentParser parser) throws IOException {
        CreateSnapshotResponse createSnapshotResponse = new CreateSnapshotResponse();

        parser.nextToken(); // move to '{'

        if (parser.currentToken() != Token.START_OBJECT) {
            throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['{']");
        }

        parser.nextToken(); // move to 'snapshot' || 'accepted'

        if ("snapshot".equals(parser.currentName())) {
            createSnapshotResponse.snapshotInfo = SnapshotInfo.fromXContent(parser);
        } else if ("accepted".equals(parser.currentName())) {
            parser.nextToken(); // move to 'accepted' field value

            if (parser.booleanValue()) {
                // ensure accepted is a boolean value
            }

            parser.nextToken(); // move past 'true'/'false'
        } else {
            throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] expected ['snapshot', 'accepted']");
        }

        if (parser.currentToken() != Token.END_OBJECT) {
            throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['}']");
        }

        parser.nextToken(); // move past '}'

        return createSnapshotResponse;
    }

    @Override
    public String toString() {
        return "CreateSnapshotResponse{" +
            "snapshotInfo=" + snapshotInfo +
            '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        CreateSnapshotResponse that = (CreateSnapshotResponse) o;
        return Objects.equals(snapshotInfo, that.snapshotInfo);
    }

    @Override
    public int hashCode() {
        return Objects.hash(snapshotInfo);
    }
}
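Note: fromXContent handles both shapes of the REST response: a completed snapshot ({"snapshot":{...}}) and an in-flight one ({"accepted":true}) when wait_for_completion is false. A minimal parsing sketch; the registry and deprecation handler choices are assumptions:

    // hypothetical: parse a create-snapshot response body
    String json = "{\"accepted\":true}";
    try (XContentParser parser = XContentType.JSON.xContent()
            .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, json)) {
        CreateSnapshotResponse response = CreateSnapshotResponse.fromXContent(parser);
        // response.getSnapshotInfo() is null here: the snapshot was only accepted, not completed
    }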
@@ -20,23 +20,37 @@
package org.elasticsearch.action.admin.cluster.snapshots.get;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.snapshots.SnapshotInfo;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * Get snapshots response
 */
public class GetSnapshotsResponse extends ActionResponse implements ToXContentObject {

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<GetSnapshotsResponse, Void> GET_SNAPSHOT_PARSER =
        new ConstructingObjectParser<>(GetSnapshotsResponse.class.getName(), true,
            (args) -> new GetSnapshotsResponse((List<SnapshotInfo>) args[0]));

    static {
        GET_SNAPSHOT_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(),
            (p, c) -> SnapshotInfo.SNAPSHOT_INFO_PARSER.apply(p, c).build(), new ParseField("snapshots"));
    }

    private List<SnapshotInfo> snapshots = Collections.emptyList();

    GetSnapshotsResponse() {

@@ -87,4 +101,20 @@ public class GetSnapshotsResponse extends ActionResponse implements ToXContentOb
        return builder;
    }

    public static GetSnapshotsResponse fromXContent(XContentParser parser) throws IOException {
        return GET_SNAPSHOT_PARSER.parse(parser, null);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        GetSnapshotsResponse that = (GetSnapshotsResponse) o;
        return Objects.equals(snapshots, that.snapshots);
    }

    @Override
    public int hashCode() {
        return Objects.hash(snapshots);
    }
}
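Note: GET_SNAPSHOT_PARSER is constructed lenient (the true flag), so unknown fields returned by newer servers are skipped rather than failing the client. A short usage sketch under the same parser assumptions as above:

    // hypothetical: an empty snapshots array parses to an empty response
    String json = "{\"snapshots\":[]}";
    try (XContentParser parser = XContentType.JSON.xContent()
            .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, json)) {
        GetSnapshotsResponse response = GetSnapshotsResponse.fromXContent(parser);
        // response.getSnapshots() is an empty list
    }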
@@ -22,9 +22,12 @@ package org.elasticsearch.action.explain;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.internal.AliasFilter;

@@ -34,7 +37,9 @@ import java.io.IOException;
/**
 * Explain request encapsulating the explain query and document identifier to get an explanation for.
 */
public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
public class ExplainRequest extends SingleShardRequest<ExplainRequest> implements ToXContentObject {

    private static final ParseField QUERY_FIELD = new ParseField("query");

    private String type = "_all";
    private String id;

@@ -186,4 +191,12 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
        out.writeOptionalWriteable(fetchSourceContext);
        out.writeVLong(nowInMillis);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(QUERY_FIELD.getPreferredName(), query);
        builder.endObject();
        return builder;
    }
}
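Note: only the query is serialized into the request body; the other ExplainRequest parameters (routing, preference, fetch-source settings, and so on) travel as URL parameters in the REST call. A small sketch, with index, type, id, and field values made up:

    // hypothetical: the body produced for an explain call contains only the query
    ExplainRequest request = new ExplainRequest("index", "_doc", "1");
    request.query(QueryBuilders.termQuery("user", "kimchy"));
    // toXContent renders roughly: {"query":{"term":{"user":{"value":"kimchy","boost":1.0}}}}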
@@ -21,11 +21,19 @@ package org.elasticsearch.action.explain;

import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.rest.RestStatus;

import java.io.IOException;
import java.util.Collection;
import java.util.Objects;

import static org.elasticsearch.common.lucene.Lucene.readExplanation;
import static org.elasticsearch.common.lucene.Lucene.writeExplanation;

@@ -33,7 +41,17 @@ import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
/**
 * Response containing the score explanation.
 */
public class ExplainResponse extends ActionResponse {
public class ExplainResponse extends ActionResponse implements StatusToXContentObject {

    private static final ParseField _INDEX = new ParseField("_index");
    private static final ParseField _TYPE = new ParseField("_type");
    private static final ParseField _ID = new ParseField("_id");
    private static final ParseField MATCHED = new ParseField("matched");
    private static final ParseField EXPLANATION = new ParseField("explanation");
    private static final ParseField VALUE = new ParseField("value");
    private static final ParseField DESCRIPTION = new ParseField("description");
    private static final ParseField DETAILS = new ParseField("details");
    private static final ParseField GET = new ParseField("get");

    private String index;
    private String type;

@@ -94,6 +112,11 @@ public class ExplainResponse extends ActionResponse {
        return getResult;
    }

    @Override
    public RestStatus status() {
        return exists ? RestStatus.OK : RestStatus.NOT_FOUND;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);

@@ -129,4 +152,90 @@ public class ExplainResponse extends ActionResponse {
            getResult.writeTo(out);
        }
    }

    private static final ConstructingObjectParser<ExplainResponse, Boolean> PARSER = new ConstructingObjectParser<>("explain", true,
        (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], (String) arg[2], exists, (Explanation) arg[3],
            (GetResult) arg[4]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), _INDEX);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), _TYPE);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), _ID);
        final ConstructingObjectParser<Explanation, Boolean> explanationParser = new ConstructingObjectParser<>("explanation", true,
            arg -> {
                if ((float) arg[0] > 0) {
                    return Explanation.match((float) arg[0], (String) arg[1], (Collection<Explanation>) arg[2]);
                } else {
                    return Explanation.noMatch((String) arg[1], (Collection<Explanation>) arg[2]);
                }
            });
        explanationParser.declareFloat(ConstructingObjectParser.constructorArg(), VALUE);
        explanationParser.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION);
        explanationParser.declareObjectArray(ConstructingObjectParser.constructorArg(), explanationParser, DETAILS);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), explanationParser, EXPLANATION);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> GetResult.fromXContentEmbedded(p), GET);
    }

    public static ExplainResponse fromXContent(XContentParser parser, boolean exists) {
        return PARSER.apply(parser, exists);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(_INDEX.getPreferredName(), index);
        builder.field(_TYPE.getPreferredName(), type);
        builder.field(_ID.getPreferredName(), id);
        builder.field(MATCHED.getPreferredName(), isMatch());
        if (hasExplanation()) {
            builder.startObject(EXPLANATION.getPreferredName());
            buildExplanation(builder, explanation);
            builder.endObject();
        }
        if (getResult != null) {
            builder.startObject(GET.getPreferredName());
            getResult.toXContentEmbedded(builder, params);
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
        builder.field(VALUE.getPreferredName(), explanation.getValue());
        builder.field(DESCRIPTION.getPreferredName(), explanation.getDescription());
        Explanation[] innerExps = explanation.getDetails();
        if (innerExps != null) {
            builder.startArray(DETAILS.getPreferredName());
            for (Explanation exp : innerExps) {
                builder.startObject();
                buildExplanation(builder, exp);
                builder.endObject();
            }
            builder.endArray();
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ExplainResponse other = (ExplainResponse) obj;
        return index.equals(other.index)
            && type.equals(other.type)
            && id.equals(other.id)
            && Objects.equals(explanation, other.explanation)
            && getResult.isExists() == other.getResult.isExists()
            && Objects.equals(getResult.sourceAsMap(), other.getResult.sourceAsMap())
            && Objects.equals(getResult.getFields(), other.getResult.getFields());
    }

    @Override
    public int hashCode() {
        return Objects.hash(index, type, id, explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields());
    }
}
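Note: the explanation parser above is registered recursively: the details field is declared as an object array parsed by explanationParser itself, which is how arbitrarily nested Lucene explanations are rebuilt. Because the JSON carries no explicit match flag, a value greater than zero selects Explanation.match and anything else Explanation.noMatch. For example, a nested explanation round-trips as:

    // {"value":1.2,"description":"sum of:","details":[
    //     {"value":1.2,"description":"weight(field:term)","details":[]}]}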
@@ -270,42 +270,7 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        for (SearchRequest request : multiSearchRequest.requests()) {
            try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) {
                xContentBuilder.startObject();
                if (request.indices() != null) {
                    xContentBuilder.field("index", request.indices());
                }
                if (request.indicesOptions() != null && request.indicesOptions() != SearchRequest.DEFAULT_INDICES_OPTIONS) {
                    if (request.indicesOptions().expandWildcardsOpen() && request.indicesOptions().expandWildcardsClosed()) {
                        xContentBuilder.field("expand_wildcards", "all");
                    } else if (request.indicesOptions().expandWildcardsOpen()) {
                        xContentBuilder.field("expand_wildcards", "open");
                    } else if (request.indicesOptions().expandWildcardsClosed()) {
                        xContentBuilder.field("expand_wildcards", "closed");
                    } else {
                        xContentBuilder.field("expand_wildcards", "none");
                    }
                    xContentBuilder.field("ignore_unavailable", request.indicesOptions().ignoreUnavailable());
                    xContentBuilder.field("allow_no_indices", request.indicesOptions().allowNoIndices());
                }
                if (request.types() != null) {
                    xContentBuilder.field("types", request.types());
                }
                if (request.searchType() != null) {
                    xContentBuilder.field("search_type", request.searchType().name().toLowerCase(Locale.ROOT));
                }
                if (request.requestCache() != null) {
                    xContentBuilder.field("request_cache", request.requestCache());
                }
                if (request.preference() != null) {
                    xContentBuilder.field("preference", request.preference());
                }
                if (request.routing() != null) {
                    xContentBuilder.field("routing", request.routing());
                }
                if (request.allowPartialSearchResults() != null) {
                    xContentBuilder.field("allow_partial_search_results", request.allowPartialSearchResults());
                }
                xContentBuilder.endObject();
                writeSearchRequestParams(request, xContentBuilder);
                BytesReference.bytes(xContentBuilder).writeTo(output);
            }
            output.write(xContent.streamSeparator());

@@ -322,5 +287,44 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
        }
        return output.toByteArray();
    }

    public static void writeSearchRequestParams(SearchRequest request, XContentBuilder xContentBuilder) throws IOException {
        xContentBuilder.startObject();
        if (request.indices() != null) {
            xContentBuilder.field("index", request.indices());
        }
        if (request.indicesOptions() != null && request.indicesOptions() != SearchRequest.DEFAULT_INDICES_OPTIONS) {
            if (request.indicesOptions().expandWildcardsOpen() && request.indicesOptions().expandWildcardsClosed()) {
                xContentBuilder.field("expand_wildcards", "all");
            } else if (request.indicesOptions().expandWildcardsOpen()) {
                xContentBuilder.field("expand_wildcards", "open");
            } else if (request.indicesOptions().expandWildcardsClosed()) {
                xContentBuilder.field("expand_wildcards", "closed");
            } else {
                xContentBuilder.field("expand_wildcards", "none");
            }
            xContentBuilder.field("ignore_unavailable", request.indicesOptions().ignoreUnavailable());
            xContentBuilder.field("allow_no_indices", request.indicesOptions().allowNoIndices());
        }
        if (request.types() != null) {
            xContentBuilder.field("types", request.types());
        }
        if (request.searchType() != null) {
            xContentBuilder.field("search_type", request.searchType().name().toLowerCase(Locale.ROOT));
        }
        if (request.requestCache() != null) {
            xContentBuilder.field("request_cache", request.requestCache());
        }
        if (request.preference() != null) {
            xContentBuilder.field("preference", request.preference());
        }
        if (request.routing() != null) {
            xContentBuilder.field("routing", request.routing());
        }
        if (request.allowPartialSearchResults() != null) {
            xContentBuilder.field("allow_partial_search_results", request.allowPartialSearchResults());
        }
        xContentBuilder.endObject();
    }

}
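Note: pulling the metadata header out into the static writeSearchRequestParams makes it reusable by other multi-request APIs (for example a multi-search-template) without duplicating the field-by-field logic. Each request in an msearch body becomes one metadata line followed by the search source, separated by the XContent stream separator (a newline for JSON), roughly:

    // {"index":["test"],"search_type":"query_then_fetch"}
    // {"query":{"match_all":{"boost":1.0}}}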
@@ -60,6 +60,7 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest
    private static final ToXContent.Params FORMAT_PARAMS = new ToXContent.MapParams(Collections.singletonMap("pretty", "false"));

    public static final int DEFAULT_PRE_FILTER_SHARD_SIZE = 128;
    public static final int DEFAULT_BATCHED_REDUCE_SIZE = 512;

    private SearchType searchType = SearchType.DEFAULT;

@@ -79,7 +80,7 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest

    private Scroll scroll;

    private int batchedReduceSize = 512;
    private int batchedReduceSize = DEFAULT_BATCHED_REDUCE_SIZE;

    private int maxConcurrentShardRequests = 0;

@@ -422,7 +422,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
        private final int successful;
        private final int skipped;

        Clusters(int total, int successful, int skipped) {
        public Clusters(int total, int successful, int skipped) {
            assert total >= 0 && successful >= 0 && skipped >= 0
                : "total: " + total + " successful: " + successful + " skipped: " + skipped;
            assert successful <= total && skipped == total - successful

@@ -50,6 +50,11 @@ public enum SearchType {
     */
    public static final SearchType DEFAULT = QUERY_THEN_FETCH;

    /**
     * Non-deprecated types
     */
    public static final SearchType [] CURRENTLY_SUPPORTED = {QUERY_THEN_FETCH, DFS_QUERY_THEN_FETCH};

    private byte id;

    SearchType(byte id) {

@@ -94,4 +99,5 @@ public enum SearchType {
        throw new IllegalArgumentException("No search type for [" + searchType + "]");
    }
}

}
@@ -22,12 +22,15 @@ package org.elasticsearch.action.support;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestRequest;

import java.io.IOException;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

@@ -38,7 +41,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeSt
 * Controls how to deal with unavailable concrete indices (closed or missing), how wildcard expressions are expanded
 * to actual indices (all, closed or open indices) and how to deal with wildcard expressions that resolve to no indices.
 */
public class IndicesOptions {
public class IndicesOptions implements ToXContentFragment {

    public enum WildcardStates {
        OPEN,

@@ -313,6 +316,18 @@
            defaultSettings);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startArray("expand_wildcards");
        for (WildcardStates expandWildcard : expandWildcards) {
            builder.value(expandWildcard.toString().toLowerCase(Locale.ROOT));
        }
        builder.endArray();
        builder.field("ignore_unavailable", ignoreUnavailable());
        builder.field("allow_no_indices", allowNoIndices());
        return builder;
    }

    /**
     * Returns true if the name represents a valid name for one of the indices option
     * false otherwise
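Note: as a ToXContentFragment, IndicesOptions writes its fields into the enclosing object rather than opening one of its own, which is what allows CreateSnapshotRequest.toXContent above to inline it into the request body. Within such a body the fragment looks roughly like:

    // "expand_wildcards":["open"],"ignore_unavailable":false,"allow_no_indices":true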
@@ -29,19 +29,17 @@ import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.OutputStreamIndexOutput;
import org.apache.lucene.store.SimpleFSDirectory;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.store.IndexOutputOutputStream;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.IOUtils;

import java.io.FileNotFoundException;
import java.io.IOException;

@@ -54,7 +52,6 @@ import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

@@ -70,9 +67,8 @@ public abstract class MetaDataStateFormat<T> {
    public static final String STATE_FILE_EXTENSION = ".st";

    private static final String STATE_FILE_CODEC = "state";
    private static final int MIN_COMPATIBLE_STATE_FILE_VERSION = 0;
    private static final int MIN_COMPATIBLE_STATE_FILE_VERSION = 1;
    private static final int STATE_FILE_VERSION = 1;
    private static final int STATE_FILE_VERSION_ES_2X_AND_BELOW = 0;
    private static final int BUFFER_SIZE = 4096;
    private final String prefix;
    private final Pattern stateFilePattern;

@@ -186,16 +182,11 @@
        try (IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) {
            // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
            CodecUtil.checksumEntireFile(indexInput);
            final int fileVersion = CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION,
                STATE_FILE_VERSION);
            CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, STATE_FILE_VERSION);
            final XContentType xContentType = XContentType.values()[indexInput.readInt()];
            if (xContentType != FORMAT) {
                throw new IllegalStateException("expected state in " + file + " to be " + FORMAT + " format but was " + xContentType);
            }
            if (fileVersion == STATE_FILE_VERSION_ES_2X_AND_BELOW) {
                // format version 0, wrote a version that always came from the content state file and was never used
                indexInput.readLong(); // version currently unused
            }
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {

@@ -263,10 +254,9 @@
     * @param dataLocations the data-locations to try.
     * @return the latest state or <code>null</code> if no state was found.
     */
    public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegistry, Path... dataLocations) throws IOException {
        List<PathAndStateId> files = new ArrayList<>();
        long maxStateId = -1;
        boolean maxStateIdIsLegacy = true;
        if (dataLocations != null) { // select all eligible files first
            for (Path dataLocation : dataLocations) {
                final Path stateDir = dataLocation.resolve(STATE_DIR_NAME);

@@ -280,9 +270,7 @@
                    if (matcher.matches()) {
                        final long stateId = Long.parseLong(matcher.group(1));
                        maxStateId = Math.max(maxStateId, stateId);
                        final boolean legacy = MetaDataStateFormat.STATE_FILE_EXTENSION.equals(matcher.group(2)) == false;
                        maxStateIdIsLegacy &= legacy; // on purpose, see NOTE below
                        PathAndStateId pav = new PathAndStateId(stateFile, stateId, legacy);
                        PathAndStateId pav = new PathAndStateId(stateFile, stateId);
                        logger.trace("found state file: {}", pav);
                        files.add(pav);
                    }

@@ -292,39 +280,19 @@
                }
            }
        }
        final List<Throwable> exceptions = new ArrayList<>();
        T state = null;
        // NOTE: we might have multiple version of the latest state if there are multiple data dirs.. for this case
        // we iterate only over the ones with the max version. If we have at least one state file that uses the
        // new format (ie. legacy == false) then we know that the latest version state ought to use this new format.
        // In case the state file with the latest version does not use the new format while older state files do,
        // the list below will be empty and loading the state will fail
        // we iterate only over the ones with the max version.
        long finalMaxStateId = maxStateId;
        Collection<PathAndStateId> pathAndStateIds = files
            .stream()
            .filter(new StateIdAndLegacyPredicate(maxStateId, maxStateIdIsLegacy))
            .filter(pathAndStateId -> pathAndStateId.id == finalMaxStateId)
            .collect(Collectors.toCollection(ArrayList::new));

        final List<Throwable> exceptions = new ArrayList<>();
        for (PathAndStateId pathAndStateId : pathAndStateIds) {
            try {
                final Path stateFile = pathAndStateId.file;
                final long id = pathAndStateId.id;
                if (pathAndStateId.legacy) { // read the legacy format -- plain XContent
                    final byte[] data = Files.readAllBytes(stateFile);
                    if (data.length == 0) {
                        logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());
                        continue;
                    }
                    try (XContentParser parser = XContentHelper
                            .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, new BytesArray(data))) {
                        state = fromXContent(parser);
                    }
                    if (state == null) {
                        logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());
                    }
                } else {
                    state = read(namedXContentRegistry, stateFile);
                    logger.trace("state id [{}] read from [{}]", id, stateFile.getFileName());
                }
                T state = read(namedXContentRegistry, pathAndStateId.file);
                logger.trace("state id [{}] read from [{}]", pathAndStateId.id, pathAndStateId.file.getFileName());
                return state;
            } catch (Exception e) {
                exceptions.add(new IOException("failed to read " + pathAndStateId.toString(), e));

@@ -338,46 +306,24 @@
            // We have some state files but none of them gave us a usable state
            throw new IllegalStateException("Could not find a state file to recover from among " + files);
        }
        return state;
        return null;
    }

    /**
     * Filters out all {@link org.elasticsearch.gateway.MetaDataStateFormat.PathAndStateId} instances with a different id than
     * the given one.
     */
    private static final class StateIdAndLegacyPredicate implements Predicate<PathAndStateId> {
        private final long id;
        private final boolean legacy;

        StateIdAndLegacyPredicate(long id, boolean legacy) {
            this.id = id;
            this.legacy = legacy;
        }

        @Override
        public boolean test(PathAndStateId input) {
            return input.id == id && input.legacy == legacy;
        }
    }

    /**
     * Internal struct-like class that holds the parsed state id, the file
     * and a flag if the file is a legacy state ie. pre 1.5
     * Internal struct-like class that holds the parsed state id and the file
     */
    private static class PathAndStateId {
        final Path file;
        final long id;
        final boolean legacy;

        private PathAndStateId(Path file, long id, boolean legacy) {
        private PathAndStateId(Path file, long id) {
            this.file = file;
            this.id = id;
            this.legacy = legacy;
        }

        @Override
        public String toString() {
            return "[id:" + id + ", legacy:" + legacy + ", file:" + file.toAbsolutePath() + "]";
            return "[id:" + id + ", file:" + file.toAbsolutePath() + "]";
        }
    }
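Note: after this change every readable state file must carry the version 1 header, so the on-disk layout enforced by the read path above is: a Lucene codec header (codec "state", version 1), one int recording the XContentType ordinal, the XContent-encoded state itself, and a Lucene footer whose checksum is verified up front by checksumEntireFile. Version 0 files from 2.x and below, which additionally prepended an unused long, are no longer readable.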
@@ -257,7 +257,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        indexMetaDataFormat = new ChecksumBlobStoreFormat<>(INDEX_METADATA_CODEC, METADATA_NAME_FORMAT,
            IndexMetaData::fromXContent, namedXContentRegistry, isCompress());
        snapshotFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_CODEC, SNAPSHOT_NAME_FORMAT,
            SnapshotInfo::fromXContent, namedXContentRegistry, isCompress());
            SnapshotInfo::fromXContentInternal, namedXContentRegistry, isCompress());
    }

    @Override
@@ -19,30 +19,22 @@

package org.elasticsearch.rest.action.search;

import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.rest.action.RestBuilderListener;
import org.elasticsearch.rest.action.RestStatusToXContentListener;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

import java.io.IOException;

import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestStatus.NOT_FOUND;
import static org.elasticsearch.rest.RestStatus.OK;

/**
 * Rest action for computing a score explanation for specific documents.

@@ -89,57 +81,6 @@ public class RestExplainAction extends BaseRestHandler {

        explainRequest.fetchSourceContext(FetchSourceContext.parseFromRestRequest(request));

        return channel -> client.explain(explainRequest, new RestBuilderListener<ExplainResponse>(channel) {
            @Override
            public RestResponse buildResponse(ExplainResponse response, XContentBuilder builder) throws Exception {
                builder.startObject();
                builder.field(Fields._INDEX, response.getIndex())
                    .field(Fields._TYPE, response.getType())
                    .field(Fields._ID, response.getId())
                    .field(Fields.MATCHED, response.isMatch());

                if (response.hasExplanation()) {
                    builder.startObject(Fields.EXPLANATION);
                    buildExplanation(builder, response.getExplanation());
                    builder.endObject();
                }
                GetResult getResult = response.getGetResult();
                if (getResult != null) {
                    builder.startObject(Fields.GET);
                    response.getGetResult().toXContentEmbedded(builder, request);
                    builder.endObject();
                }
                builder.endObject();
                return new BytesRestResponse(response.isExists() ? OK : NOT_FOUND, builder);
            }

            private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
                builder.field(Fields.VALUE, explanation.getValue());
                builder.field(Fields.DESCRIPTION, explanation.getDescription());
                Explanation[] innerExps = explanation.getDetails();
                if (innerExps != null) {
                    builder.startArray(Fields.DETAILS);
                    for (Explanation exp : innerExps) {
                        builder.startObject();
                        buildExplanation(builder, exp);
                        builder.endObject();
                    }
                    builder.endArray();
                }
            }
        });
    }

    static class Fields {
        static final String _INDEX = "_index";
        static final String _TYPE = "_type";
        static final String _ID = "_id";
        static final String MATCHED = "matched";
        static final String EXPLANATION = "explanation";
        static final String VALUE = "value";
        static final String DESCRIPTION = "description";
        static final String DETAILS = "details";
        static final String GET = "get";

        return channel -> client.explain(explainRequest, new RestStatusToXContentListener<>(channel));
    }
}
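Note: the hand-rolled response builder could be deleted because ExplainResponse now implements StatusToXContentObject (see the earlier hunk): RestStatusToXContentListener serializes the response through the response's own toXContent and takes the HTTP code from status(). The same one-liner pattern applies to any handler whose response implements StatusToXContentObject, for instance:

    // SearchResponse also implements StatusToXContentObject
    return channel -> client.search(searchRequest, new RestStatusToXContentListener<>(channel));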
@ -497,8 +497,8 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<D
|
|||
* This parser outputs a temporary map of the objects needed to create the
|
||||
* SearchHit instead of directly creating the SearchHit. The reason for this
|
||||
* is that this way we can reuse the parser when parsing xContent from
|
||||
* {@link CompletionSuggestion.Entry.Option} which unfortunately inlines the
|
||||
* output of
|
||||
* {@link org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option} which unfortunately inlines
|
||||
* the output of
|
||||
* {@link #toInnerXContent(XContentBuilder, org.elasticsearch.common.xcontent.ToXContent.Params)}
|
||||
* of the included search hit. The output of the map is used to create the
|
||||
* actual SearchHit instance via {@link #createFromMap(Map)}
|
||||
|
|
|
@@ -200,7 +200,7 @@ final class CompositeAggregator extends BucketsAggregator {

    /**
     * Replay the documents that might contain a top bucket and pass top buckets to
     * the {@link this#deferredCollectors}.
     * the {@link #deferredCollectors}.
     */
    private void runDeferredCollections() throws IOException {
        final boolean needsScores = needsScores();
@@ -49,7 +49,7 @@ final class CompositeValuesCollectorQueue implements Releasable {
     *
     * @param sources The list of {@link CompositeValuesSourceConfig} to build the composite buckets.
     * @param size The number of composite buckets to keep.
     * @param afterKey
     * @param afterKey composite key
     */
    CompositeValuesCollectorQueue(BigArrays bigArrays, SingleDimensionValuesSource<?>[] sources, int size, CompositeKey afterKey) {
        this.bigArrays = bigArrays;
@@ -74,7 +74,7 @@ abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements R
     * The current value is filled by a {@link LeafBucketCollector} that visits all the
     * values of each document. This method saves this current value in a slot and should only be used
     * in the context of a collection.
     * See {@link this#getLeafCollector}.
     * See {@link #getLeafCollector}.
     */
    abstract void copyCurrent(int slot);

@@ -87,7 +87,7 @@ abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements R
     * The current value is filled by a {@link LeafBucketCollector} that visits all the
     * values of each document. This method compares this current value with the value present in
     * the provided slot and should only be used in the context of a collection.
     * See {@link this#getLeafCollector}.
     * See {@link #getLeafCollector}.
     */
    abstract int compareCurrent(int slot);

@@ -95,7 +95,7 @@ abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements R
     * The current value is filled by a {@link LeafBucketCollector} that visits all the
     * values of each document. This method compares this current value with the after value
     * set on this source and should only be used in the context of a collection.
     * See {@link this#getLeafCollector}.
     * See {@link #getLeafCollector}.
     */
    abstract int compareCurrentWithAfter();

@@ -120,7 +120,7 @@ abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements R
     * Creates a {@link LeafBucketCollector} that extracts all values from a document and invokes
     * {@link LeafBucketCollector#collect} on the provided <code>next</code> collector for each of them.
     * The current value of this source is set on each call and can be accessed by <code>next</code> via
     * the {@link this#copyCurrent(int)} and {@link this#compareCurrent(int)} methods. Note that these methods
     * the {@link #copyCurrent(int)} and {@link #compareCurrent(int)} methods. Note that these methods
     * are only valid when invoked from the {@link LeafBucketCollector} created in this source.
     */
    abstract LeafBucketCollector getLeafCollector(LeafReaderContext context, LeafBucketCollector next) throws IOException;
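The recurring fix in these hunks is the Javadoc link form: {@link this#method} is not a valid reference, whereas a bare {@link #method} resolves against the enclosing class. A small illustrative example of the accepted forms (the class below is made up, not from the diff):

    /**
     * {@link #compute()}            references a member of the enclosing class.
     * {@link Example#compute()}     references a member of a named class.
     * {@link java.util.List#size()} references a fully qualified member.
     */
    class Example {
        int compute() {
            return 42;
        }
    }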
@@ -23,12 +23,14 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -79,6 +81,155 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
    private static final Comparator<SnapshotInfo> COMPARATOR =
        Comparator.comparing(SnapshotInfo::startTime).thenComparing(SnapshotInfo::snapshotId);

    public static final class SnapshotInfoBuilder {
        private String snapshotName = null;
        private String snapshotUUID = null;
        private String state = null;
        private String reason = null;
        private List<String> indices = null;
        private long startTime = 0L;
        private long endTime = 0L;
        private ShardStatsBuilder shardStatsBuilder = null;
        private Boolean includeGlobalState = null;
        private int version = -1;
        private List<SnapshotShardFailure> shardFailures = null;

        private void setSnapshotName(String snapshotName) {
            this.snapshotName = snapshotName;
        }

        private void setSnapshotUUID(String snapshotUUID) {
            this.snapshotUUID = snapshotUUID;
        }

        private void setState(String state) {
            this.state = state;
        }

        private void setReason(String reason) {
            this.reason = reason;
        }

        private void setIndices(List<String> indices) {
            this.indices = indices;
        }

        private void setStartTime(long startTime) {
            this.startTime = startTime;
        }

        private void setEndTime(long endTime) {
            this.endTime = endTime;
        }

        private void setShardStatsBuilder(ShardStatsBuilder shardStatsBuilder) {
            this.shardStatsBuilder = shardStatsBuilder;
        }

        private void setIncludeGlobalState(Boolean includeGlobalState) {
            this.includeGlobalState = includeGlobalState;
        }

        private void setVersion(int version) {
            this.version = version;
        }

        private void setShardFailures(List<SnapshotShardFailure> shardFailures) {
            this.shardFailures = shardFailures;
        }

        private void ignoreVersion(String version) {
            // ignore extra field
        }

        private void ignoreStartTime(String startTime) {
            // ignore extra field
        }

        private void ignoreEndTime(String endTime) {
            // ignore extra field
        }

        private void ignoreDurationInMillis(long durationInMillis) {
            // ignore extra field
        }

        public SnapshotInfo build() {
            SnapshotId snapshotId = new SnapshotId(snapshotName, snapshotUUID);

            if (indices == null) {
                indices = Collections.emptyList();
            }

            SnapshotState snapshotState = state == null ? null : SnapshotState.valueOf(state);
            Version version = this.version == -1 ? Version.CURRENT : Version.fromId(this.version);

            int totalShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getTotalShards();
            int successfulShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getSuccessfulShards();

            if (shardFailures == null) {
                shardFailures = new ArrayList<>();
            }

            return new SnapshotInfo(snapshotId, indices, snapshotState, reason, version, startTime, endTime,
                totalShards, successfulShards, shardFailures, includeGlobalState);
        }
    }

    private static final class ShardStatsBuilder {
        private int totalShards;
        private int successfulShards;

        private void setTotalShards(int totalShards) {
            this.totalShards = totalShards;
        }

        int getTotalShards() {
            return totalShards;
        }

        private void setSuccessfulShards(int successfulShards) {
            this.successfulShards = successfulShards;
        }

        int getSuccessfulShards() {
            return successfulShards;
        }

        private void ignoreFailedShards(int failedShards) {
            // ignore extra field
        }
    }

    public static final ObjectParser<SnapshotInfoBuilder, Void> SNAPSHOT_INFO_PARSER =
        new ObjectParser<>(SnapshotInfoBuilder.class.getName(), true, SnapshotInfoBuilder::new);

    private static final ObjectParser<ShardStatsBuilder, Void> SHARD_STATS_PARSER =
        new ObjectParser<>(ShardStatsBuilder.class.getName(), true, ShardStatsBuilder::new);

    static {
        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotName, new ParseField(SNAPSHOT));
        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotUUID, new ParseField(UUID));
        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setState, new ParseField(STATE));
        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setReason, new ParseField(REASON));
        SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setIndices, new ParseField(INDICES));
        SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setStartTime, new ParseField(START_TIME_IN_MILLIS));
        SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setEndTime, new ParseField(END_TIME_IN_MILLIS));
        SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setShardStatsBuilder, SHARD_STATS_PARSER, new ParseField(SHARDS));
        SNAPSHOT_INFO_PARSER.declareBoolean(SnapshotInfoBuilder::setIncludeGlobalState, new ParseField(INCLUDE_GLOBAL_STATE));
        SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID));
        SNAPSHOT_INFO_PARSER.declareObjectArray(SnapshotInfoBuilder::setShardFailures, SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER,
            new ParseField(FAILURES));
        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreVersion, new ParseField(VERSION));
        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreStartTime, new ParseField(START_TIME));
        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreEndTime, new ParseField(END_TIME));
        SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::ignoreDurationInMillis, new ParseField(DURATION_IN_MILLIS));

        SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setTotalShards, new ParseField(TOTAL));
        SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setSuccessfulShards, new ParseField(SUCCESSFUL));
        SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::ignoreFailedShards, new ParseField(FAILED));
    }

    private final SnapshotId snapshotId;

    @Nullable
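A compact illustration of the lenient ObjectParser pattern used above, on a made-up builder (the class and fields are hypothetical; the real mapping is the set of declare* calls in the static block): the parser instantiates the builder, routes each known JSON field to a private setter via method reference, and silently skips unknown fields because the second constructor argument is true.

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ObjectParser;

    // Hypothetical target type mirroring the SnapshotInfoBuilder approach.
    class PersonBuilder {
        private String name;
        private long age;

        private void setName(String name) {
            this.name = name;
        }

        private void setAge(long age) {
            this.age = age;
        }

        String build() {
            return name + " (" + age + ")";
        }

        // true => lenient: unknown JSON fields are ignored rather than rejected.
        static final ObjectParser<PersonBuilder, Void> PARSER =
            new ObjectParser<>("person", true, PersonBuilder::new);

        static {
            PARSER.declareString(PersonBuilder::setName, new ParseField("name"));
            PARSER.declareLong(PersonBuilder::setAge, new ParseField("age"));
        }
    }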
@@ -317,29 +468,21 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
        return COMPARATOR.compare(this, o);
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        final SnapshotInfo that = (SnapshotInfo) o;
        return startTime == that.startTime && snapshotId.equals(that.snapshotId);
    }

    @Override
    public int hashCode() {
        int result = snapshotId.hashCode();
        result = 31 * result + Long.hashCode(startTime);
        return result;
    }

    @Override
    public String toString() {
        return "SnapshotInfo[snapshotId=" + snapshotId + ", state=" + state + ", indices=" + indices + "]";
        return "SnapshotInfo{" +
            "snapshotId=" + snapshotId +
            ", state=" + state +
            ", reason='" + reason + '\'' +
            ", indices=" + indices +
            ", startTime=" + startTime +
            ", endTime=" + endTime +
            ", totalShards=" + totalShards +
            ", successfulShards=" + successfulShards +
            ", includeGlobalState=" + includeGlobalState +
            ", version=" + version +
            ", shardFailures=" + shardFailures +
            '}';
    }

    /**
@@ -360,7 +503,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        // write snapshot info to repository snapshot blob format
        if (CONTEXT_MODE_SNAPSHOT.equals(params.param(CONTEXT_MODE_PARAM))) {
            return toXContentSnapshot(builder, params);
            return toXContentInternal(builder, params);
        }

        final boolean verbose = params.paramAsBoolean("verbose", GetSnapshotsRequest.DEFAULT_VERBOSE_MODE);
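The if above dispatches on a serialization context carried in the Params: callers writing the repository blob pass a context-mode parameter, while everyone else gets the REST representation. A hedged sketch of a caller selecting the internal format; the literal "context_mode" key and "SNAPSHOT" value are assumptions about what CONTEXT_MODE_PARAM and CONTEXT_MODE_SNAPSHOT hold, not verified constants:

    import java.io.IOException;
    import java.util.Collections;

    import org.elasticsearch.common.xcontent.ToXContent;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.snapshots.SnapshotInfo;

    class SnapshotBlobWriter {
        // Hypothetical helper: the param key/value mirror what CONTEXT_MODE_PARAM /
        // CONTEXT_MODE_SNAPSHOT are presumed to hold for the repository blob format.
        static XContentBuilder writeBlob(SnapshotInfo info, XContentBuilder builder) throws IOException {
            ToXContent.Params params = new ToXContent.MapParams(
                Collections.singletonMap("context_mode", "SNAPSHOT"));
            return info.toXContent(builder, params);
        }
    }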
@@ -415,7 +558,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
        return builder;
    }

    private XContentBuilder toXContentSnapshot(final XContentBuilder builder, final ToXContent.Params params) throws IOException {
    private XContentBuilder toXContentInternal(final XContentBuilder builder, final ToXContent.Params params) throws IOException {
        builder.startObject(SNAPSHOT);
        builder.field(NAME, snapshotId.getName());
        builder.field(UUID, snapshotId.getUUID());
@@ -448,12 +591,20 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
        return builder;
    }

    /**
     * This method creates a SnapshotInfo from external x-content. It does not
     * handle x-content written with the internal version.
     */
    public static SnapshotInfo fromXContent(final XContentParser parser) throws IOException {
        return SNAPSHOT_INFO_PARSER.parse(parser, null).build();
    }

    /**
     * This method creates a SnapshotInfo from internal x-content. It does not
     * handle x-content written with the external version as external x-content
     * is only for display purposes and does not need to be parsed.
     */
    public static SnapshotInfo fromXContent(final XContentParser parser) throws IOException {
    public static SnapshotInfo fromXContentInternal(final XContentParser parser) throws IOException {
        String name = null;
        String uuid = null;
        Version version = Version.CURRENT;
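A sketch of the external entry point in use: feeding a REST-style JSON body through fromXContent, which drives SNAPSHOT_INFO_PARSER and then build(). The parser-construction boilerplate (registry, deprecation handler) is the standard x-content setup of this era, shown here as an assumption, and the field names follow the constants registered on the parser:

    import java.io.IOException;

    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;
    import org.elasticsearch.snapshots.SnapshotInfo;

    class SnapshotInfoParseExample {
        static SnapshotInfo parse() throws IOException {
            // Illustrative payload; keys match the ParseField constants above.
            String json = "{\"snapshot\":\"snap_1\",\"uuid\":\"abc123\",\"state\":\"SUCCESS\","
                + "\"indices\":[\"index_1\"],\"start_time_in_millis\":1530000000000,"
                + "\"end_time_in_millis\":1530000001000,"
                + "\"shards\":{\"total\":5,\"successful\":5,\"failed\":0}}";
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                    NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
                return SnapshotInfo.fromXContent(parser);
            }
        }
    }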
@@ -607,4 +758,28 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        SnapshotInfo that = (SnapshotInfo) o;
        return startTime == that.startTime &&
            endTime == that.endTime &&
            totalShards == that.totalShards &&
            successfulShards == that.successfulShards &&
            Objects.equals(snapshotId, that.snapshotId) &&
            state == that.state &&
            Objects.equals(reason, that.reason) &&
            Objects.equals(indices, that.indices) &&
            Objects.equals(includeGlobalState, that.includeGlobalState) &&
            Objects.equals(version, that.version) &&
            Objects.equals(shardFailures, that.shardFailures);
    }

    @Override
    public int hashCode() {
        return Objects.hash(snapshotId, state, reason, indices, startTime, endTime,
            totalShards, successfulShards, includeGlobalState, version, shardFailures);
    }
}
@@ -23,8 +23,10 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -33,6 +35,7 @@ import org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException;
import org.elasticsearch.rest.RestStatus;

import java.io.IOException;
import java.util.Objects;

/**
 * Stores information about failures that occurred during shard snapshotting process
@@ -59,11 +62,23 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
     * @param reason failure reason
     */
    public SnapshotShardFailure(@Nullable String nodeId, ShardId shardId, String reason) {
        this(nodeId, shardId, reason, RestStatus.INTERNAL_SERVER_ERROR);
    }

    /**
     * Constructs new snapshot shard failure object
     *
     * @param nodeId node where failure occurred
     * @param shardId shard id
     * @param reason failure reason
     * @param status rest status
     */
    private SnapshotShardFailure(@Nullable String nodeId, ShardId shardId, String reason, RestStatus status) {
        assert reason != null;
        this.nodeId = nodeId;
        this.shardId = shardId;
        this.reason = reason;
        assert reason != null;
        status = RestStatus.INTERNAL_SERVER_ERROR;
        this.status = status;
    }

    /**
@@ -99,7 +114,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
    /**
     * Returns REST status corresponding to this failure
     *
     * @return REST status
     * @return REST STATUS
     */
    @Override
    public RestStatus status() {
@@ -151,7 +166,12 @@ public class SnapshotShardFailure implements ShardOperationFailedException {

    @Override
    public String toString() {
        return shardId + " failed, reason [" + reason + "]";
        return "SnapshotShardFailure{" +
            "shardId=" + shardId +
            ", reason='" + reason + '\'' +
            ", nodeId='" + nodeId + '\'' +
            ", status=" + status +
            '}';
    }

    /**
@@ -167,6 +187,57 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
        builder.endObject();
    }

    static final ConstructingObjectParser<SnapshotShardFailure, Void> SNAPSHOT_SHARD_FAILURE_PARSER =
        new ConstructingObjectParser<>("shard_failure", true, SnapshotShardFailure::constructSnapshotShardFailure);

    static {
        SNAPSHOT_SHARD_FAILURE_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("index"));
        SNAPSHOT_SHARD_FAILURE_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("index_uuid"));
        SNAPSHOT_SHARD_FAILURE_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("node_id"));
        // Workaround for https://github.com/elastic/elasticsearch/issues/25878
        // Some old snapshot might still have null in shard failure reasons
        SNAPSHOT_SHARD_FAILURE_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("reason"));
        SNAPSHOT_SHARD_FAILURE_PARSER.declareInt(ConstructingObjectParser.constructorArg(), new ParseField("shard_id"));
        SNAPSHOT_SHARD_FAILURE_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("status"));
    }

    private static SnapshotShardFailure constructSnapshotShardFailure(Object[] args) {
        String index = (String) args[0];
        String indexUuid = (String) args[1];
        String nodeId = (String) args[2];
        String reason = (String) args[3];
        Integer intShardId = (Integer) args[4];
        String status = (String) args[5];

        if (index == null) {
            throw new ElasticsearchParseException("index name was not set");
        }
        if (intShardId == null) {
            throw new ElasticsearchParseException("index shard was not set");
        }

        ShardId shardId = new ShardId(index, indexUuid != null ? indexUuid : IndexMetaData.INDEX_UUID_NA_VALUE, intShardId);

        // Workaround for https://github.com/elastic/elasticsearch/issues/25878
        // Some old snapshot might still have null in shard failure reasons
        String nonNullReason;
        if (reason != null) {
            nonNullReason = reason;
        } else {
            nonNullReason = "";
        }

        RestStatus restStatus;
        if (status != null) {
            restStatus = RestStatus.valueOf(status);
        } else {
            restStatus = RestStatus.INTERNAL_SERVER_ERROR;
        }

        return new SnapshotShardFailure(nodeId, shardId, nonNullReason, restStatus);
    }

    /**
     * Deserializes snapshot failure information from JSON
     *
@@ -174,56 +245,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
     * @return snapshot failure information
     */
    public static SnapshotShardFailure fromXContent(XContentParser parser) throws IOException {
        SnapshotShardFailure snapshotShardFailure = new SnapshotShardFailure();

        XContentParser.Token token = parser.currentToken();
        String index = null;
        String index_uuid = IndexMetaData.INDEX_UUID_NA_VALUE;
        int shardId = -1;
        if (token == XContentParser.Token.START_OBJECT) {
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    String currentFieldName = parser.currentName();
                    token = parser.nextToken();
                    if (token.isValue()) {
                        if ("index".equals(currentFieldName)) {
                            index = parser.text();
                        } else if ("index_uuid".equals(currentFieldName)) {
                            index_uuid = parser.text();
                        } else if ("node_id".equals(currentFieldName)) {
                            snapshotShardFailure.nodeId = parser.text();
                        } else if ("reason".equals(currentFieldName)) {
                            // Workaround for https://github.com/elastic/elasticsearch/issues/25878
                            // Some old snapshot might still have null in shard failure reasons
                            snapshotShardFailure.reason = parser.textOrNull();
                        } else if ("shard_id".equals(currentFieldName)) {
                            shardId = parser.intValue();
                        } else if ("status".equals(currentFieldName)) {
                            snapshotShardFailure.status = RestStatus.valueOf(parser.text());
                        } else {
                            throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
                        }
                    }
                } else {
                    throw new ElasticsearchParseException("unexpected token [{}]", token);
                }
            }
        } else {
            throw new ElasticsearchParseException("unexpected token [{}]", token);
        }
        if (index == null) {
            throw new ElasticsearchParseException("index name was not set");
        }
        if (shardId == -1) {
            throw new ElasticsearchParseException("index shard was not set");
        }
        snapshotShardFailure.shardId = new ShardId(index, index_uuid, shardId);
        // Workaround for https://github.com/elastic/elasticsearch/issues/25878
        // Some old snapshot might still have null in shard failure reasons
        if (snapshotShardFailure.reason == null) {
            snapshotShardFailure.reason = "";
        }
        return snapshotShardFailure;
        return SNAPSHOT_SHARD_FAILURE_PARSER.parse(parser, null);
    }

    @Override
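The net effect of this hunk: roughly fifty lines of manual token walking collapse into a one-line call into the declarative parser. For reference, a minimal self-contained sketch of the same ConstructingObjectParser pattern on a made-up type (the names are illustrative; positional constructorArg() values arrive in declaration order):

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ConstructingObjectParser;

    // Hypothetical two-field type, parsed positionally: the args array passed to the
    // constructor reference holds one entry per declare* call, in declaration order.
    class Failure {
        final String index;
        final int shardId;

        Failure(String index, int shardId) {
            this.index = index;
            this.shardId = shardId;
        }

        static final ConstructingObjectParser<Failure, Void> PARSER = new ConstructingObjectParser<>(
            "failure", true, args -> new Failure((String) args[0], (Integer) args[1]));

        static {
            PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("index"));
            PARSER.declareInt(ConstructingObjectParser.constructorArg(), new ParseField("shard_id"));
        }
    }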
@@ -238,4 +260,23 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
        builder.field("status", status.name());
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        SnapshotShardFailure that = (SnapshotShardFailure) o;
        // customized to account for discrepancies in shardId/Index toXContent/fromXContent related to uuid
        return shardId.id() == that.shardId.id() &&
            shardId.getIndexName().equals(that.shardId.getIndexName()) &&
            Objects.equals(reason, that.reason) &&
            Objects.equals(nodeId, that.nodeId) &&
            status.getStatus() == that.status.getStatus();
    }

    @Override
    public int hashCode() {
        // customized to account for discrepancies in shardId/Index toXContent/fromXContent related to uuid
        return Objects.hash(shardId.id(), shardId.getIndexName(), reason, nodeId, status.getStatus());
    }
}