Merge branch 'master' into ccr
* master:
  [TEST] REST client request without leading '/' (#29471)
  Using ObjectParser in UpdateRequest (#29293)
  Prevent accidental changes of default values (#29528)
  [Docs] Add definitions to glossary (#29127)
  Avoid self-deadlock in the translog (#29520)
  Minor cleanup in NodeInfo.groovy
  Lazy configure build tasks that require older JDKs (#29519)
  Simplify snapshot check in root build file
  Make NodeInfo#nodeVersion strongly-typed as Version (#29515)
  Enable license header exclusions (#29379)
  Use proper Java version for BWC builds (#29493)
  Mute TranslogTests#testFatalIOExceptionsWhileWritingConcurrently
  Enable skipping fetching latest for BWC builds (#29497)
commit ed427953d6
@@ -498,6 +498,13 @@ will contain your change.
 . Push both branches to your remote repository.
 . Run the tests with `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x`.
 
+== Skip fetching latest
+
+For some BWC testing scenarios, you want to use the local clone of the
+repository without fetching latest. For these use cases, you can set the system
+property `tests.bwc.git_fetch_latest` to `false` and the BWC builds will skip
+fetching the latest from the remote.
+
 == Test coverage analysis
 
 Generating test coverage reports for Elasticsearch is currently not possible through Gradle.
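
Note: a minimal invocation sketch for the new property documented above, mirroring the `./gradlew check` example earlier in this section (everything else is standard Gradle):

    ./gradlew check -Dtests.bwc.git_fetch_latest=false
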
@@ -36,7 +36,7 @@ import java.security.MessageDigest
 // common maven publishing configuration
 subprojects {
   group = 'org.elasticsearch'
-  version = VersionProperties.elasticsearch
+  version = VersionProperties.elasticsearch.toString()
   description = "Elasticsearch subproject ${project.path}"
 }
 

@@ -80,7 +80,7 @@ configure(subprojects.findAll { it.projectDir.toPath().startsWith(rootPath) }) {
    * in a branch if there are only betas and rcs in the branch so we have
    * *something* to test against. */
   VersionCollection versions = new VersionCollection(file('server/src/main/java/org/elasticsearch/Version.java').readLines('UTF-8'))
-  if (versions.currentVersion.toString() != VersionProperties.elasticsearch) {
+  if (versions.currentVersion != VersionProperties.elasticsearch) {
     throw new GradleException("The last version in Versions.java [${versions.currentVersion}] does not match " +
         "VersionProperties.elasticsearch [${VersionProperties.elasticsearch}]")
   }

@@ -245,7 +245,7 @@ subprojects {
   // other packages (e.g org.elasticsearch.client) will point to server rather than
   // their own artifacts.
   if (project.plugins.hasPlugin(BuildPlugin)) {
-    String artifactsHost = VersionProperties.elasticsearch.endsWith("-SNAPSHOT") ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
+    String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
     Closure sortClosure = { a, b -> b.group <=> a.group }
     Closure depJavadocClosure = { dep ->
       if (dep.group != null && dep.group.startsWith('org.elasticsearch')) {
@@ -97,6 +97,12 @@ class BuildPlugin implements Plugin<Project> {
         String compilerJavaHome = findCompilerJavaHome()
         String runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome)
         File gradleJavaHome = Jvm.current().javaHome
+
+        final Map<Integer, String> javaVersions = [:]
+        for (int version = 7; version <= Integer.parseInt(minimumCompilerVersion.majorVersion); version++) {
+            javaVersions.put(version, findJavaHome(version));
+        }
+
         String javaVendor = System.getProperty('java.vendor')
         String javaVersion = System.getProperty('java.version')
         String gradleJavaVersionDetails = "${javaVendor} ${javaVersion}" +
@@ -158,10 +164,32 @@ class BuildPlugin implements Plugin<Project> {
                 throw new GradleException(message)
             }
 
+            for (final Map.Entry<Integer, String> javaVersionEntry : javaVersions.entrySet()) {
+                final String javaHome = javaVersionEntry.getValue()
+                if (javaHome == null) {
+                    continue
+                }
+                JavaVersion javaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, javaHome))
+                final JavaVersion expectedJavaVersionEnum
+                final int version = javaVersionEntry.getKey()
+                if (version < 9) {
+                    expectedJavaVersionEnum = JavaVersion.toVersion("1." + version)
+                } else {
+                    expectedJavaVersionEnum = JavaVersion.toVersion(Integer.toString(version))
+                }
+                if (javaVersionEnum != expectedJavaVersionEnum) {
+                    final String message =
+                            "the environment variable JAVA" + version + "_HOME must be set to a JDK installation directory for Java" +
+                                    " ${expectedJavaVersionEnum} but is [${javaHome}] corresponding to [${javaVersionEnum}]"
+                    throw new GradleException(message)
+                }
+            }
+
             project.rootProject.ext.compilerJavaHome = compilerJavaHome
             project.rootProject.ext.runtimeJavaHome = runtimeJavaHome
             project.rootProject.ext.compilerJavaVersion = compilerJavaVersionEnum
             project.rootProject.ext.runtimeJavaVersion = runtimeJavaVersionEnum
+            project.rootProject.ext.javaVersions = javaVersions
             project.rootProject.ext.buildChecksDone = true
         }
 
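
Note on the `version < 9` branch in the check above: before Java 9 the JVM reported its specification version with a `1.` prefix, and Gradle's `JavaVersion.toVersion` accepts both spellings. A standalone sketch of the mapping the check relies on (assuming only the Gradle API on the classpath):

    import org.gradle.api.JavaVersion

    // pre-9 JDKs report e.g. "1.8"; JDK 9+ report e.g. "9" or "10"
    assert JavaVersion.toVersion('1.8') == JavaVersion.VERSION_1_8
    assert JavaVersion.toVersion('9') == JavaVersion.VERSION_1_9
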
@@ -173,6 +201,7 @@ class BuildPlugin implements Plugin<Project> {
             project.ext.runtimeJavaHome = project.rootProject.ext.runtimeJavaHome
             project.ext.compilerJavaVersion = project.rootProject.ext.compilerJavaVersion
             project.ext.runtimeJavaVersion = project.rootProject.ext.runtimeJavaVersion
+            project.ext.javaVersions = project.rootProject.ext.javaVersions
         }
 
     private static String findCompilerJavaHome() {
@@ -188,6 +217,27 @@ class BuildPlugin implements Plugin<Project> {
         return javaHome
     }
 
+    private static String findJavaHome(int version) {
+        return System.getenv('JAVA' + version + '_HOME')
+    }
+
+    /**
+     * Get Java home for the project for the specified version. If the specified version is not configured, an exception with the specified
+     * message is thrown.
+     *
+     * @param project the project
+     * @param version the version of Java home to obtain
+     * @param message the exception message if Java home for the specified version is not configured
+     * @return Java home for the specified version
+     * @throws GradleException if Java home for the specified version is not configured
+     */
+    static String getJavaHome(final Project project, final int version, final String message) {
+        if (project.javaVersions.get(version) == null) {
+            throw new GradleException(message)
+        }
+        return project.javaVersions.get(version)
+    }
+
     private static String findRuntimeJavaHome(final String compilerJavaHome) {
         assert compilerJavaHome != null
         return System.getenv('RUNTIME_JAVA_HOME') ?: compilerJavaHome
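
Note: `getJavaHome` is the hook build scripts use to demand a specific JDK lazily. A minimal sketch of a caller (the task name and message are hypothetical; the idiom mirrors the real usages later in this commit):

    task runOnJdk9(type: Exec) {
        // the closure inside the GString defers the lookup, so a missing
        // JAVA9_HOME only fails the build if this task actually executes
        environment('JAVA_HOME', "${-> getJavaHome(project, 9, 'JAVA9_HOME must be set to run runOnJdk9')}")
    }
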
@@ -517,17 +567,18 @@ class BuildPlugin implements Plugin<Project> {
             jarTask.destinationDir = new File(project.buildDir, 'distributions')
             // fixup the jar manifest
             jarTask.doFirst {
-                boolean isSnapshot = VersionProperties.elasticsearch.endsWith("-SNAPSHOT");
-                String version = VersionProperties.elasticsearch;
-                if (isSnapshot) {
-                    version = version.substring(0, version.length() - 9)
-                }
+                final Version versionWithoutSnapshot = new Version(
+                        VersionProperties.elasticsearch.major,
+                        VersionProperties.elasticsearch.minor,
+                        VersionProperties.elasticsearch.revision,
+                        VersionProperties.elasticsearch.suffix,
+                        false)
                 // this doFirst is added before the info plugin, therefore it will run
                 // after the doFirst added by the info plugin, and we can override attributes
                 jarTask.manifest.attributes(
-                        'X-Compile-Elasticsearch-Version': version,
+                        'X-Compile-Elasticsearch-Version': versionWithoutSnapshot,
                         'X-Compile-Lucene-Version': VersionProperties.lucene,
-                        'X-Compile-Elasticsearch-Snapshot': isSnapshot,
+                        'X-Compile-Elasticsearch-Snapshot': VersionProperties.elasticsearch.isSnapshot(),
                         'Build-Date': ZonedDateTime.now(ZoneOffset.UTC),
                         'Build-Java-Version': project.compilerJavaVersion)
                 if (jarTask.manifest.attributes.containsKey('Change') == false) {
@@ -74,20 +74,36 @@ public class Version {
         return "${major}.${minor}.${revision}${suffix}${snapshotStr}"
     }
 
+    public boolean before(Version compareTo) {
+        return id < compareTo.id
+    }
+
     public boolean before(String compareTo) {
-        return id < fromString(compareTo).id
+        return before(fromString(compareTo))
+    }
+
+    public boolean onOrBefore(Version compareTo) {
+        return id <= compareTo.id
     }
 
     public boolean onOrBefore(String compareTo) {
-        return id <= fromString(compareTo).id
+        return onOrBefore(fromString(compareTo))
+    }
+
+    public boolean onOrAfter(Version compareTo) {
+        return id >= compareTo.id
     }
 
     public boolean onOrAfter(String compareTo) {
-        return id >= fromString(compareTo).id
+        return onOrAfter(fromString(compareTo))
+    }
+
+    public boolean after(Version compareTo) {
+        return id > compareTo.id
     }
 
     public boolean after(String compareTo) {
-        return id > fromString(compareTo).id
+        return after(fromString(compareTo))
     }
 
     public boolean onOrBeforeIncludingSuffix(Version otherVersion) {
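
Note: with the typed overloads added above, comparisons no longer round-trip through strings. A short usage sketch (version numbers illustrative; assumes org.elasticsearch.gradle.Version on the classpath):

    Version v = Version.fromString('6.2.4')
    assert v.before('6.3.0')                      // String overload parses, then delegates
    assert v.before(Version.fromString('6.3.0')) // Version overload compares ids directly
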
@@ -22,7 +22,7 @@ package org.elasticsearch.gradle
  * Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions.
  */
 class VersionProperties {
-    static final String elasticsearch
+    static final Version elasticsearch
     static final String lucene
     static final Map<String, String> versions = new HashMap<>()
     static {

@@ -32,7 +32,7 @@ class VersionProperties {
             throw new RuntimeException('/version.properties resource missing')
         }
         props.load(propsStream)
-        elasticsearch = props.getProperty('elasticsearch')
+        elasticsearch = Version.fromString(props.getProperty('elasticsearch'))
         lucene = props.getProperty('lucene')
         for (String property : props.stringPropertyNames()) {
             versions.put(property, props.getProperty(property))
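
Note: because `VersionProperties.elasticsearch` is now a `Version`, call sites that genuinely need the string form must convert explicitly, as the follow-on hunks below do:

    String es = VersionProperties.elasticsearch.toString() // e.g. '7.0.0-alpha1-SNAPSHOT' (illustrative)
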
@@ -41,7 +41,7 @@ public class DocsTestPlugin extends RestTestPlugin {
        * to the version being built for testing but needs to resolve to
        * the last released version for docs. */
       '\\{version\\}':
-          VersionProperties.elasticsearch.replace('-SNAPSHOT', ''),
+          VersionProperties.elasticsearch.toString().replace('-SNAPSHOT', ''),
       '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''),
     ]
     Task listSnippets = project.tasks.create('listSnippets', SnippetsTask)
@@ -77,7 +77,7 @@ class PluginPropertiesTask extends Copy {
                 'name': extension.name,
                 'description': extension.description,
                 'version': stringSnap(extension.version),
-                'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch),
+                'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch.toString()),
                 'javaVersion': project.targetCompatibility as String,
                 'classname': extension.classname,
                 'extendedPlugins': extension.extendedPlugins.join(','),
@@ -49,6 +49,14 @@ public class LicenseHeadersTask extends AntTask {
     @Input
     List<String> approvedLicenses = ['Apache', 'Generated']
 
+    /**
+     * Files that should be excluded from the license header check. Use with extreme care, only in situations where the license on the
+     * source file is compatible with the codebase but we do not want to add the license to the list of approved headers (to avoid the
+     * possibility of inadvertently using the license on our own source files).
+     */
+    @Input
+    List<String> excludes = []
+
     /**
      * Additional license families that may be found. The key is the license category name (5 characters),
      * followed by the family name and the value list of patterns to search for.

@@ -95,7 +103,7 @@ public class LicenseHeadersTask extends AntTask {
                 for (File dir: dirSet.srcDirs) {
                     // sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main...
                     if (dir.exists()) {
-                        ant.fileset(dir: dir)
+                        ant.fileset(dir: dir, excludes: excludes.join(' '))
                     }
                 }
             }
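
Note: a build script opts files out of the header check through the new `excludes` property; a minimal sketch (assuming the conventional `licenseHeaders` task name; the pattern is hypothetical):

    licenseHeaders {
        excludes << 'org/elasticsearch/example/VendoredFile.java'
    }
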
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.gradle.test
 
+import org.elasticsearch.gradle.Version
 import org.gradle.api.GradleException
 import org.gradle.api.Project
 import org.gradle.api.tasks.Input

@@ -37,7 +38,7 @@ class ClusterConfiguration {
     int numBwcNodes = 0
 
     @Input
-    String bwcVersion = null
+    Version bwcVersion = null
 
     @Input
     int httpPort = 0
@@ -107,11 +107,14 @@ class ClusterFormationTasks {
         for (int i = 0; i < config.numNodes; i++) {
             // we start N nodes and out of these N nodes there might be M bwc nodes.
             // for each of those nodes we might have a different configuration
-            String elasticsearchVersion = VersionProperties.elasticsearch
-            Configuration distro = currentDistro
+            final Configuration distro
+            final Version elasticsearchVersion
             if (i < config.numBwcNodes) {
                 elasticsearchVersion = config.bwcVersion
                 distro = bwcDistro
+            } else {
+                elasticsearchVersion = VersionProperties.elasticsearch
+                distro = currentDistro
             }
             NodeInfo node = new NodeInfo(config, i, project, prefix, elasticsearchVersion, sharedDir)
             nodes.add(node)

@@ -126,7 +129,7 @@ class ClusterFormationTasks {
     }
 
     /** Adds a dependency on the given distribution */
-    static void configureDistributionDependency(Project project, String distro, Configuration configuration, String elasticsearchVersion) {
+    static void configureDistributionDependency(Project project, String distro, Configuration configuration, Version elasticsearchVersion) {
         String packaging = distro
         if (distro == 'tar') {
             packaging = 'tar.gz'

@@ -137,7 +140,7 @@ class ClusterFormationTasks {
     }
 
     /** Adds a dependency on a different version of the given plugin, which will be retrieved using gradle's dependency resolution */
-    static void configureBwcPluginDependency(String name, Project project, Project pluginProject, Configuration configuration, String elasticsearchVersion) {
+    static void configureBwcPluginDependency(String name, Project project, Project pluginProject, Configuration configuration, Version elasticsearchVersion) {
         verifyProjectHasBuildPlugin(name, elasticsearchVersion, project, pluginProject)
         final String pluginName = findPluginName(pluginProject)
         project.dependencies.add(configuration.name, "org.elasticsearch.plugin:${pluginName}:${elasticsearchVersion}@zip")

@@ -303,7 +306,7 @@ class ClusterFormationTasks {
         // Default the watermarks to absurdly low to prevent the tests from failing on nodes without enough disk space
         esConfig['cluster.routing.allocation.disk.watermark.low'] = '1b'
         esConfig['cluster.routing.allocation.disk.watermark.high'] = '1b'
-        if (Version.fromString(node.nodeVersion).major >= 6) {
+        if (node.nodeVersion.major >= 6) {
             esConfig['cluster.routing.allocation.disk.watermark.flood_stage'] = '1b'
         }
         // increase script compilation limit since tests can rapid-fire script compilations

@@ -803,7 +806,7 @@ class ClusterFormationTasks {
         return retVal
     }
 
-    static void verifyProjectHasBuildPlugin(String name, String version, Project project, Project pluginProject) {
+    static void verifyProjectHasBuildPlugin(String name, Version version, Project project, Project pluginProject) {
         if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false && pluginProject.plugins.hasPlugin(MetaPluginBuildPlugin) == false) {
             throw new GradleException("Task [${name}] cannot add plugin [${pluginProject.path}] with version [${version}] to project's " +
                 "[${project.path}] dependencies: the plugin is not an esplugin or es_meta_plugin")
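
Note: a sketch of the cluster configuration this selection logic serves (the `integTestCluster` extension name is an assumption about the consuming build scripts; the version is illustrative):

    integTestCluster {
        numNodes = 2
        numBwcNodes = 1                          // the first node starts from the BWC distribution...
        bwcVersion = Version.fromString('6.2.4') // ...at this version; the rest use VersionProperties.elasticsearch
    }
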
@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.elasticsearch.gradle.test
 
 import com.sun.jna.Native

@@ -29,6 +30,8 @@ import java.nio.file.Files
 import java.nio.file.Path
 import java.nio.file.Paths
 
+import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
+
 /**
  * A container for the files and configuration associated with a single node in a test cluster.
 */

@@ -100,10 +103,10 @@ class NodeInfo {
     ByteArrayOutputStream buffer = new ByteArrayOutputStream()
 
     /** the version of elasticsearch that this node runs */
-    String nodeVersion
+    Version nodeVersion
 
     /** Holds node configuration for part of a test cluster. */
-    NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, String nodeVersion, File sharedDir) {
+    NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, Version nodeVersion, File sharedDir) {
         this.config = config
         this.nodeNum = nodeNum
         this.sharedDir = sharedDir

@@ -162,7 +165,14 @@ class NodeInfo {
             args.add("${esScript}")
         }
 
-        env = ['JAVA_HOME': project.runtimeJavaHome]
+        if (nodeVersion.before("6.2.0")) {
+            env = ['JAVA_HOME': "${-> getJavaHome(project, 8, "JAVA8_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"]
+        } else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) {
+            env = ['JAVA_HOME': "${-> getJavaHome(project, 9, "JAVA9_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"]
+        } else {
+            env = ['JAVA_HOME': (String) project.runtimeJavaHome]
+        }
+
         args.addAll("-E", "node.portsfile=true")
         String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
         String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs
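
Note on the `"${-> ...}"` strings above (a Groovy idiom, not part of the change): a GString whose placeholder is a zero-argument closure is rendered lazily, on each toString(), so the JAVA8_HOME/JAVA9_HOME lookups only happen, and can only fail, for nodes that actually start. A standalone sketch:

    def value = 'before'
    String eager = "value is ${value}"    // interpolated when the string is created
    String lazy = "value is ${-> value}"  // closure re-evaluated on each toString()
    value = 'after'
    assert eager == 'value is before'
    assert lazy == 'value is after'
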
@@ -284,7 +294,7 @@ class NodeInfo {
     }
 
     /** Returns the directory elasticsearch home is contained in for the given distribution */
-    static File homeDir(File baseDir, String distro, String nodeVersion) {
+    static File homeDir(File baseDir, String distro, Version nodeVersion) {
         String path
         switch (distro) {
             case 'integ-test-zip':

@@ -302,7 +312,7 @@ class NodeInfo {
         return new File(baseDir, path)
     }
 
-    static File pathConf(File baseDir, String distro, String nodeVersion) {
+    static File pathConf(File baseDir, String distro, Version nodeVersion) {
         switch (distro) {
             case 'integ-test-zip':
             case 'zip':
@@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.shard.ShardId;

@@ -68,9 +67,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
         String defaultIndex = request.param("index");
         String defaultType = request.param("type");
         String defaultRouting = request.param("routing");
-        String fieldsParam = request.param("fields");
         String defaultPipeline = request.param("pipeline");
-        String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null;
 
         String waitForActiveShards = request.param("wait_for_active_shards");
         if (waitForActiveShards != null) {

@@ -78,7 +75,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
         }
         bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
         bulkRequest.setRefreshPolicy(request.param("refresh"));
-        bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields,
+        bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting,
             null, defaultPipeline, null, true, request.getXContentType());
 
         // short circuit the call to the transport layer
@@ -58,6 +58,7 @@ import static org.hamcrest.Matchers.startsWith;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}.

@@ -135,8 +136,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
         final RestClientBuilder restClientBuilder = RestClient.builder(
             new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())).setDefaultHeaders(defaultHeaders);
         if (pathPrefix.length() > 0) {
-            // sometimes cut off the leading slash
-            restClientBuilder.setPathPrefix(randomBoolean() ? pathPrefix.substring(1) : pathPrefix);
+            restClientBuilder.setPathPrefix(pathPrefix);
         }
 
         if (useAuth) {

@@ -281,6 +281,33 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
         }
     }
 
+    public void testUrlWithoutLeadingSlash() throws Exception {
+        if (pathPrefix.length() == 0) {
+            try {
+                restClient.performRequest("GET", "200");
+                fail("request should have failed");
+            } catch(ResponseException e) {
+                assertEquals(404, e.getResponse().getStatusLine().getStatusCode());
+            }
+        } else {
+            {
+                Response response = restClient.performRequest("GET", "200");
+                //a trailing slash gets automatically added if a pathPrefix is configured
+                assertEquals(200, response.getStatusLine().getStatusCode());
+            }
+            {
+                //pathPrefix is not required to start with '/', will be added automatically
+                try (RestClient restClient = RestClient.builder(
+                        new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()))
+                        .setPathPrefix(pathPrefix.substring(1)).build()) {
+                    Response response = restClient.performRequest("GET", "200");
+                    //a trailing slash gets automatically added if a pathPrefix is configured
+                    assertEquals(200, response.getStatusLine().getStatusCode());
+                }
+            }
+        }
+    }
+
     private Response bodyTest(final String method) throws IOException {
         return bodyTest(restClient, method);
     }
@@ -17,11 +17,12 @@
  * under the License.
  */
 
+
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.elasticsearch.gradle.LoggedExec
 import org.elasticsearch.gradle.Version
 
-import java.util.regex.Matcher
+import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
 
 /**
  * This is a dummy project which does a local checkout of the previous

@@ -54,6 +55,16 @@ subprojects {
 
   final String remote = System.getProperty("tests.bwc.remote", "elastic")
 
+  final boolean gitFetchLatest
+  final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true")
+  if ("true".equals(gitFetchLatestProperty)) {
+    gitFetchLatest = true
+  } else if ("false".equals(gitFetchLatestProperty)) {
+    gitFetchLatest = false
+  } else {
+    throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]")
+  }
+
   task createClone(type: LoggedExec) {
     onlyIf { checkoutDir.exists() == false }
     commandLine = ['git', 'clone', rootDir, checkoutDir]

@@ -83,7 +94,7 @@ subprojects {
   }
 
   task fetchLatest(type: LoggedExec) {
-    onlyIf { project.gradle.startParameter.isOffline() == false }
+    onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest }
     dependsOn addRemote
     workingDir = checkoutDir
     commandLine = ['git', 'fetch', '--all']

@@ -134,12 +145,13 @@ subprojects {
   task buildBwcVersion(type: Exec) {
     dependsOn checkoutBwcBranch, writeBuildMetadata
     workingDir = checkoutDir
-    if (project.rootProject.ext.runtimeJavaVersion == JavaVersion.VERSION_1_8 && ["5.6", "6.0", "6.1"].contains(bwcBranch)) {
-      /*
-       * If runtime Java home is set to JDK 8 and we are building branches that are officially built with JDK 8, push this to JAVA_HOME for
-       * these builds.
-       */
-      environment('JAVA_HOME', System.getenv('RUNTIME_JAVA_HOME'))
+    if (["5.6", "6.0", "6.1"].contains(bwcBranch)) {
+      // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds
+      environment('JAVA_HOME', "${-> getJavaHome(project, 8, "JAVA8_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]")}")
+    } else if ("6.2".equals(bwcBranch)) {
+      environment('JAVA_HOME', "${-> getJavaHome(project, 9, "JAVA9_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]")}")
+    } else {
+      environment('JAVA_HOME', project.compilerJavaHome)
     }
     if (Os.isFamily(Os.FAMILY_WINDOWS)) {
       executable 'cmd'
@@ -61,6 +61,15 @@
   `object`. The mapping also allows you to define (amongst other things)
   how the value for a field should be analyzed.
 
+[[glossary-filter]] filter ::
+
+  A filter is a non-scoring <<glossary-query,query>>, meaning that it does not score documents.
+  It is only concerned about answering the question - "Does this document match?".
+  The answer is always a simple, binary yes or no. This kind of query is said to be made
+  in a <<query-filter-context,filter context>>,
+  hence it is called a filter. Filters are simple checks for set inclusion or exclusion.
+  In most cases, the goal of filtering is to reduce the number of documents that have to be examined.
+
 [[glossary-index]] index ::
 
   An index is like a _table_ in a relational database. It has a

@@ -105,6 +114,16 @@
   +
   See also <<glossary-routing,routing>>
 
+[[glossary-query]] query ::
+
+  A query is the basic component of a search. A search can be defined by one or more queries
+  which can be mixed and matched in endless combinations. While <<glossary-filter,filters>> are
+  queries that only determine if a document matches, those queries that also calculate how well
+  the document matches are known as "scoring queries". Those queries assign it a score, which is
+  later used to sort matched documents. Scoring queries take more resources than <<glossary-filter,non scoring queries>>
+  and their query results are not cacheable. As a general rule, use query clauses for full-text
+  search or for any condition that requires scoring, and use filters for everything else.
+
 [[glossary-replica-shard]] replica shard ::
 
   Each <<glossary-primary-shard,primary shard>> can have zero or more

@@ -161,8 +180,9 @@
 
   A term is an exact value that is indexed in Elasticsearch. The terms
   `foo`, `Foo`, `FOO` are NOT equivalent. Terms (i.e. exact values) can
-  be searched for using _term_ queries. +
-  See also <<glossary-text,text>> and <<glossary-analysis,analysis>>.
+  be searched for using _term_ queries.
+  +
+  See also <<glossary-text,text>> and <<glossary-analysis,analysis>>.
 
 [[glossary-text]] text ::
 
@@ -2,7 +2,7 @@
 === Breaking API changes in 7.0
 
 ==== Camel case and underscore parameters deprecated in 6.x have been removed
 A number of duplicate parameters deprecated in 6.x have been removed from
 Bulk request, Multi Get request, Term Vectors request, and More Like This Query
 requests.
 

@@ -22,3 +22,7 @@ The following parameters starting with underscore have been removed:
 Instead of these removed parameters, use their non camel case equivalents without
 starting underscore, e.g. use `version_type` instead of `_version_type` or `versionType`.
 
+
+==== The parameter `fields` deprecated in 6.x has been removed from Bulk request
+and Update request. The Update API returns `400 - Bad request` if request contains
+unknown parameters (instead of ignored in the previous version).
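
Note: for Java-API callers the migration mirrors the REST one: request `_source` filtering instead of the removed `fields` option. A minimal sketch in Groovy syntax (field name illustrative; `fetchSource` as on `UpdateRequest` in this commit's sources):

    import org.elasticsearch.action.update.UpdateRequest

    UpdateRequest updateRequest = new UpdateRequest('index', 'type', 'id')
    // before (removed in 7.0): updateRequest.fields('user.name')
    updateRequest.fetchSource(['user.name'] as String[], [] as String[])
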
@@ -24,8 +24,11 @@ should be able to use the standard launching mechanism which
 is more flexible and reliable.
 """
 
+
 import org.apache.tools.ant.taskdefs.condition.Os
 
+import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
+
 apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
 

@@ -51,9 +54,8 @@ dependencies {
   es090 'org.elasticsearch:elasticsearch:0.90.13@zip'
 }
 
-if (project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 || Os.isFamily(Os.FAMILY_WINDOWS)) {
-  /* We can't run the dependencies with Java 9 so for now we'll skip the whole
-   * thing. We can't get the pid files in windows so we skip that as well.... */
+if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+  // we can't get the pid files in windows so we skip that
   integTest.enabled = false
 } else {
   /* Set up tasks to unzip and run the old versions of ES before running the

@@ -75,7 +77,7 @@ if (project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 || Os.isFamily(Os.FAMILY_WINDOWS)) {
       dependsOn unzip
       executable = new File(project.runtimeJavaHome, 'bin/java')
       env 'CLASSPATH', "${ -> project.configurations.oldesFixture.asPath }"
-      env 'JAVA_HOME', project.runtimeJavaHome
+      env 'JAVA_HOME', "${-> getJavaHome(project, 7, "JAVA7_HOME must be set to run reindex-from-old")}"
       args 'oldes.OldElasticsearch',
            baseDir,
            unzip.temporaryDir,
@@ -37,10 +37,6 @@
         "type" : "string",
         "description" : "Default document type for items which don't provide one"
       },
-      "fields": {
-        "type": "list",
-        "description" : "Default comma-separated list of fields to return in the response for updates, can be overridden on each sub-request"
-      },
       "_source": {
         "type" : "list",
         "description" : "True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub-request"
@@ -27,10 +27,6 @@
         "type": "string",
         "description": "Sets the number of shard copies that must be active before proceeding with the update operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)"
       },
-      "fields": {
-        "type": "list",
-        "description": "A comma-separated list of fields to return in the response"
-      },
       "_source": {
         "type" : "list",
         "description" : "True or false to return the _source field or not, or a list of fields to return"
@@ -299,7 +299,7 @@ public class BulkProcessor implements Closeable {
      */
     public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
                                           @Nullable String defaultPipeline, @Nullable Object payload, XContentType xContentType) throws Exception {
-        bulkRequest.add(data, defaultIndex, defaultType, null, null, null, defaultPipeline, payload, true, xContentType);
+        bulkRequest.add(data, defaultIndex, defaultType, null, null, defaultPipeline, payload, true, xContentType);
         executeIfNeeded();
         return this;
     }
@@ -36,8 +36,6 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;

@@ -66,8 +64,6 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  * @see org.elasticsearch.client.Client#bulk(BulkRequest)
  */
 public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
-    private static final DeprecationLogger DEPRECATION_LOGGER =
-        new DeprecationLogger(Loggers.getLogger(BulkRequest.class));
 
     private static final int REQUEST_OVERHEAD = 50;
 

@@ -80,7 +76,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest
     private static final ParseField VERSION_TYPE = new ParseField("version_type");
     private static final ParseField RETRY_ON_CONFLICT = new ParseField("retry_on_conflict");
     private static final ParseField PIPELINE = new ParseField("pipeline");
-    private static final ParseField FIELDS = new ParseField("fields");
     private static final ParseField SOURCE = new ParseField("_source");
 
     /**

@@ -277,7 +272,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest
      */
     public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
                            XContentType xContentType) throws IOException {
-        return add(data, defaultIndex, defaultType, null, null, null, null, null, true, xContentType);
+        return add(data, defaultIndex, defaultType, null, null, null, null, true, xContentType);
     }
 
     /**

@@ -285,12 +280,13 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest
      */
     public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex,
                            XContentType xContentType) throws IOException {
-        return add(data, defaultIndex, defaultType, null, null, null, null, null, allowExplicitIndex, xContentType);
+        return add(data, defaultIndex, defaultType, null, null, null, null, allowExplicitIndex, xContentType);
     }
 
-    public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String
-                           defaultRouting, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String
-                           defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex, XContentType xContentType) throws IOException {
+    public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
+                           @Nullable String defaultRouting, @Nullable FetchSourceContext defaultFetchSourceContext,
+                           @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex,
+                           XContentType xContentType) throws IOException {
         XContent xContent = xContentType.xContent();
         int line = 0;
         int from = 0;

@@ -333,7 +329,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest
                 String id = null;
                 String routing = defaultRouting;
                 FetchSourceContext fetchSourceContext = defaultFetchSourceContext;
-                String[] fields = defaultFields;
                 String opType = null;
                 long version = Versions.MATCH_ANY;
                 VersionType versionType = VersionType.INTERNAL;

@@ -371,21 +366,14 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest
                             retryOnConflict = parser.intValue();
                         } else if (PIPELINE.match(currentFieldName, parser.getDeprecationHandler())) {
                             pipeline = parser.text();
-                        } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
-                            throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
                         } else if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) {
                             fetchSourceContext = FetchSourceContext.fromXContent(parser);
                         } else {
                             throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]");
                         }
                     } else if (token == XContentParser.Token.START_ARRAY) {
-                        if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
-                            DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
-                            List<Object> values = parser.list();
-                            fields = values.toArray(new String[values.size()]);
-                        } else {
-                            throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
-                        }
+                        throw new IllegalArgumentException("Malformed action/metadata line [" + line +
+                            "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
                     } else if (token == XContentParser.Token.START_OBJECT && SOURCE.match(currentFieldName, parser.getDeprecationHandler())) {
                         fetchSourceContext = FetchSourceContext.fromXContent(parser);
                     } else if (token != XContentParser.Token.VALUE_NULL) {

@@ -435,10 +423,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest
                     if (fetchSourceContext != null) {
                         updateRequest.fetchSource(fetchSourceContext);
                     }
-                    if (fields != null) {
-                        updateRequest.fields(fields);
-                    }
 
                     IndexRequest upsertRequest = updateRequest.upsertRequest();
                     if (upsertRequest != null) {
                         upsertRequest.version(version);
@@ -291,8 +291,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequest
                 indexResponse.getId(), indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(), indexResponse.getVersion(),
                 indexResponse.getResult());
 
-            if ((updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) ||
-                (updateRequest.fields() != null && updateRequest.fields().length > 0)) {
+            if (updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) {
                 final BytesReference indexSourceAsBytes = updateIndexRequest.source();
                 final Tuple<XContentType, Map<String, Object>> sourceAndContent =
                     XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType());
@@ -180,8 +180,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationAction
                 bulkAction.execute(toSingleItemBulkRequest(upsertRequest), wrapBulkResponse(
                     ActionListener.<IndexResponse>wrap(response -> {
                         UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult());
-                        if ((request.fetchSource() != null && request.fetchSource().fetchSource()) ||
-                            (request.fields() != null && request.fields().length > 0)) {
+                        if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
                             Tuple<XContentType, Map<String, Object>> sourceAndContent =
                                 XContentHelper.convertToMap(upsertSourceBytes, true, upsertRequest.getContentType());
                             update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
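Callers that used `fields` on update requests now opt into `_source` via `fetchSource` and read values out of the returned source map. A hedged migration sketch, based on the integration-test changes later in this diff (index, type, and field names are assumptions):

```java
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;

class FieldsToFetchSourceSketch {
    static Object readCounter(Client client) {
        // Before: .setFields("counter") and getGetResult().field("counter").getValue()
        UpdateResponse response = client.prepareUpdate("test", "type1", "1")
            .setFetchSource("counter", null)  // include "counter", exclude nothing
            .setDocAsUpsert(true)
            .setDoc("counter", 1)
            .get();
        // After: read the value out of the (possibly filtered) _source instead
        return response.getGetResult().sourceAsMap().get("counter");
    }
}
```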
@@ -29,7 +29,6 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.settings.Settings;

@@ -49,7 +48,7 @@ import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.lookup.SourceLookup;

 import java.io.IOException;
-import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.function.LongSupplier;
@@ -292,61 +291,33 @@ public class UpdateHelper extends AbstractComponent {

     /**
      * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response.
-     * For BWC this function also extracts the {@link UpdateRequest#fields()} from the updated document to be returned in a update response
      */
     public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version,
                                              final Map<String, Object> source, XContentType sourceContentType,
                                              @Nullable final BytesReference sourceAsBytes) {
-        if ((request.fields() == null || request.fields().length == 0) &&
-            (request.fetchSource() == null || request.fetchSource().fetchSource() == false)) {
+        if (request.fetchSource() == null || request.fetchSource().fetchSource() == false) {
             return null;
         }
-        SourceLookup sourceLookup = new SourceLookup();
-        sourceLookup.setSource(source);
-        boolean sourceRequested = false;
-        Map<String, DocumentField> fields = null;
-        if (request.fields() != null && request.fields().length > 0) {
-            for (String field : request.fields()) {
-                if (field.equals("_source")) {
-                    sourceRequested = true;
-                    continue;
-                }
-                Object value = sourceLookup.extractValue(field);
-                if (value != null) {
-                    if (fields == null) {
-                        fields = new HashMap<>(2);
-                    }
-                    DocumentField documentField = fields.get(field);
-                    if (documentField == null) {
-                        documentField = new DocumentField(field, new ArrayList<>(2));
-                        fields.put(field, documentField);
-                    }
-                    documentField.getValues().add(value);
-                }
-            }
-        }

         BytesReference sourceFilteredAsBytes = sourceAsBytes;
-        if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
-            sourceRequested = true;
-            if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
+        if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
+            SourceLookup sourceLookup = new SourceLookup();
+            sourceLookup.setSource(source);
             Object value = sourceLookup.filter(request.fetchSource());
             try {
                 final int initialCapacity = Math.min(1024, sourceAsBytes.length());
                 BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
                 try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) {
                     builder.value(value);
                     sourceFilteredAsBytes = BytesReference.bytes(builder);
-                    }
-                } catch (IOException e) {
-                    throw new ElasticsearchException("Error filtering source", e);
                 }
+            } catch (IOException e) {
+                throw new ElasticsearchException("Error filtering source", e);
             }
         }

         // TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType)
-        return new GetResult(concreteIndex, request.type(), request.id(), version, true,
-            sourceRequested ? sourceFilteredAsBytes : null, fields);
+        return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceFilteredAsBytes, Collections.emptyMap());
     }

     public static class Result {
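For orientation, the rewritten `extractGetResult` has three outcomes, driven entirely by the request's `FetchSourceContext`. A small sketch of the flavours (the request coordinates are illustrative):

```java
import org.elasticsearch.action.update.UpdateRequest;

class FetchSourceFlavoursSketch {
    static void demo() {
        UpdateRequest request = new UpdateRequest("test", "type", "1");
        request.fetchSource(false);  // extractGetResult(...) returns null: nothing to fetch
        request.fetchSource(true);   // unfiltered source; the returned fields map is always empty now
        request.fetchSource(new String[]{"obj.*"}, new String[]{"obj.secret"});
        // includes/excludes set: SourceLookup.filter(...) re-serialises only the
        // matching subset into the GetResult's source bytes
    }
}
```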
@@ -19,8 +19,6 @@

 package org.elasticsearch.action.update;

-import java.util.Arrays;
-
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.DocWriteRequest;

@@ -30,11 +28,14 @@ import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -48,15 +49,46 @@ import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

 import java.io.IOException;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;

 import static org.elasticsearch.action.ValidateActions.addValidationError;

 public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         implements DocWriteRequest<UpdateRequest>, WriteRequest<UpdateRequest>, ToXContentObject {
+    private static ObjectParser<UpdateRequest, Void> PARSER;
+
+    private static final ParseField SCRIPT_FIELD = new ParseField("script");
+    private static final ParseField SCRIPTED_UPSERT_FIELD = new ParseField("scripted_upsert");
+    private static final ParseField UPSERT_FIELD = new ParseField("upsert");
+    private static final ParseField DOC_FIELD = new ParseField("doc");
+    private static final ParseField DOC_AS_UPSERT_FIELD = new ParseField("doc_as_upsert");
+    private static final ParseField DETECT_NOOP_FIELD = new ParseField("detect_noop");
+    private static final ParseField SOURCE_FIELD = new ParseField("_source");
+
+    static {
+        PARSER = new ObjectParser<>(UpdateRequest.class.getSimpleName());
+        PARSER.declareField((request, script) -> request.script = script,
+            (parser, context) -> Script.parse(parser), SCRIPT_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING);
+        PARSER.declareBoolean(UpdateRequest::scriptedUpsert, SCRIPTED_UPSERT_FIELD);
+        PARSER.declareObject((request, builder) -> request.safeUpsertRequest().source(builder),
+            (parser, context) -> {
+                XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
+                builder.copyCurrentStructure(parser);
+                return builder;
+            }, UPSERT_FIELD);
+        PARSER.declareObject((request, builder) -> request.safeDoc().source(builder),
+            (parser, context) -> {
+                XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
+                docBuilder.copyCurrentStructure(parser);
+                return docBuilder;
+            }, DOC_FIELD);
+        PARSER.declareBoolean(UpdateRequest::docAsUpsert, DOC_AS_UPSERT_FIELD);
+        PARSER.declareBoolean(UpdateRequest::detectNoop, DETECT_NOOP_FIELD);
+        PARSER.declareField(UpdateRequest::fetchSource,
+            (parser, context) -> FetchSourceContext.fromXContent(parser), SOURCE_FIELD,
+            ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING);
+    }
+
     private String type;
     private String id;
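The hand-rolled token loop (removed in the `fromXContent` hunk below) gives way to this declarative `ObjectParser`, which also rejects unknown fields. A minimal sketch of the pattern against a hypothetical `Options` value class; the real parser above writes straight into the `UpdateRequest` instance:

```java
import java.io.IOException;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

class ObjectParserSketch {
    // Hypothetical value class, only to show the wiring.
    static class Options {
        boolean detectNoop = true;
    }

    static final ObjectParser<Options, Void> PARSER = new ObjectParser<>("options", Options::new);
    static {
        PARSER.declareBoolean((options, value) -> options.detectNoop = value, new ParseField("detect_noop"));
    }

    static Options parse(XContentParser parser) throws IOException {
        // Any field without a matching declare* call fails with
        // "[options] unknown field [...], parser not found"
        return PARSER.parse(parser, null);
    }
}
```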
@@ -66,7 +98,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
     @Nullable
     Script script;

-    private String[] fields;
     private FetchSourceContext fetchSourceContext;

     private long version = Versions.MATCH_ANY;

@@ -365,16 +396,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         return this;
     }

-    /**
-     * Explicitly specify the fields that will be returned. By default, nothing is returned.
-     * @deprecated Use {@link UpdateRequest#fetchSource(String[], String[])} instead
-     */
-    @Deprecated
-    public UpdateRequest fields(String... fields) {
-        this.fields = fields;
-        return this;
-    }
-
     /**
      * Indicate that _source should be returned with every hit, with an
      * "include" and/or "exclude" set which can include simple wildcard
@@ -389,7 +410,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
      */
     public UpdateRequest fetchSource(@Nullable String include, @Nullable String exclude) {
         FetchSourceContext context = this.fetchSourceContext == null ? FetchSourceContext.FETCH_SOURCE : this.fetchSourceContext;
-        this.fetchSourceContext = new FetchSourceContext(context.fetchSource(), new String[] {include}, new String[]{exclude});
+        String[] includes = include == null ? Strings.EMPTY_ARRAY : new String[]{include};
+        String[] excludes = exclude == null ? Strings.EMPTY_ARRAY : new String[]{exclude};
+        this.fetchSourceContext = new FetchSourceContext(context.fetchSource(), includes, excludes);
         return this;
     }
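A side effect worth noting: before this change, a null `include` or `exclude` leaked into the `FetchSourceContext` as `new String[]{null}`. A one-line sketch of the now-safe call (coordinates illustrative):

```java
import org.elasticsearch.action.update.UpdateRequest;

class NullExcludeSketch {
    static UpdateRequest demo() {
        // excludes is now Strings.EMPTY_ARRAY rather than new String[]{null}
        return new UpdateRequest("test", "type", "1").fetchSource("counter", null);
    }
}
```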
@@ -428,16 +451,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         return this;
     }

-
-    /**
-     * Get the fields to be returned.
-     * @deprecated Use {@link UpdateRequest#fetchSource()} instead
-     */
-    @Deprecated
-    public String[] fields() {
-        return fields;
-    }
-
     /**
      * Gets the {@link FetchSourceContext} which defines how the _source should
      * be fetched.
@@ -707,49 +720,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
     }

     public UpdateRequest fromXContent(XContentParser parser) throws IOException {
-        Script script = null;
-        XContentParser.Token token = parser.nextToken();
-        if (token == null) {
-            return this;
-        }
-        String currentFieldName = null;
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                currentFieldName = parser.currentName();
-            } else if ("script".equals(currentFieldName)) {
-                script = Script.parse(parser);
-            } else if ("scripted_upsert".equals(currentFieldName)) {
-                scriptedUpsert = parser.booleanValue();
-            } else if ("upsert".equals(currentFieldName)) {
-                XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
-                builder.copyCurrentStructure(parser);
-                safeUpsertRequest().source(builder);
-            } else if ("doc".equals(currentFieldName)) {
-                XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
-                docBuilder.copyCurrentStructure(parser);
-                safeDoc().source(docBuilder);
-            } else if ("doc_as_upsert".equals(currentFieldName)) {
-                docAsUpsert(parser.booleanValue());
-            } else if ("detect_noop".equals(currentFieldName)) {
-                detectNoop(parser.booleanValue());
-            } else if ("fields".equals(currentFieldName)) {
-                List<Object> fields = null;
-                if (token == XContentParser.Token.START_ARRAY) {
-                    fields = (List) parser.list();
-                } else if (token.isValue()) {
-                    fields = Collections.singletonList(parser.text());
-                }
-                if (fields != null) {
-                    fields(fields.toArray(new String[fields.size()]));
-                }
-            } else if ("_source".equals(currentFieldName)) {
-                fetchSourceContext = FetchSourceContext.fromXContent(parser);
-            }
-        }
-        if (script != null) {
-            this.script = script;
-        }
-        return this;
+        return PARSER.parse(parser, this, null);
     }

     public boolean docAsUpsert() {
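Parsing a request body is now a one-liner delegating to the parser declared earlier. A hedged usage sketch (the body string and request coordinates are illustrative; `LoggingDeprecationHandler` and `NamedXContentRegistry` are the same types imported above):

```java
import java.io.IOException;

import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

class FromXContentSketch {
    static UpdateRequest parse() throws IOException {
        String body = "{\"doc\":{\"field1\":\"value1\"},\"detect_noop\":false}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, body)) {
            // One call now; a body containing "fields" fails with
            // "[UpdateRequest] unknown field [fields], parser not found"
            return new UpdateRequest("test", "type", "1").fromXContent(parser);
        }
    }
}
```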
@@ -789,7 +760,12 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
             doc = new IndexRequest();
             doc.readFrom(in);
         }
-        fields = in.readOptionalStringArray();
+        if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
+            String[] fields = in.readOptionalStringArray();
+            if (fields != null) {
+                throw new IllegalArgumentException("[fields] is no longer supported");
+            }
+        }
         fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
         if (in.readBoolean()) {
             upsertRequest = new IndexRequest();

@@ -830,7 +806,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
             doc.id(id);
             doc.writeTo(out);
         }
-        out.writeOptionalStringArray(fields);
+        if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
+            out.writeOptionalStringArray(null);
+        }
         out.writeOptionalWriteable(fetchSourceContext);
         if (upsertRequest == null) {
             out.writeBoolean(false);
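The two hunks above follow the usual wire-compatibility idiom: peers on the old version still expect the optional `fields` slot on the stream, so the new code reads and discards it (rejecting non-null values) and writes a null placeholder. A generic sketch of the idiom with a hypothetical `legacy` slot, assuming the same stream types:

```java
import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

class VersionGateSketch {
    void read(StreamInput in) throws IOException {
        if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
            String legacy = in.readOptionalString();  // consume the obsolete slot
            if (legacy != null) {
                throw new IllegalArgumentException("[legacy] is no longer supported");
            }
        }
    }

    void write(StreamOutput out) throws IOException {
        if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
            out.writeOptionalString(null);  // keep the old wire format aligned
        }
    }
}
```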
@@ -880,9 +858,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         if (detectNoop == false) {
             builder.field("detect_noop", detectNoop);
         }
-        if (fields != null) {
-            builder.array("fields", fields);
-        }
         if (fetchSourceContext != null) {
             builder.field("_source", fetchSourceContext);
         }

@@ -908,9 +883,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         }
         res.append(", scripted_upsert[").append(scriptedUpsert).append("]");
         res.append(", detect_noop[").append(detectNoop).append("]");
-        if (fields != null) {
-            res.append(", fields[").append(Arrays.toString(fields)).append("]");
-        }
         return res.append("}").toString();
     }
 }
@@ -26,20 +26,15 @@ import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.VersionType;
-import org.elasticsearch.rest.action.document.RestUpdateAction;
 import org.elasticsearch.script.Script;

 import java.util.Map;

 public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<UpdateRequest, UpdateResponse, UpdateRequestBuilder>
         implements WriteRequestBuilder<UpdateRequestBuilder> {
-    private static final DeprecationLogger DEPRECATION_LOGGER =
-        new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class));

     public UpdateRequestBuilder(ElasticsearchClient client, UpdateAction action) {
         super(client, action, new UpdateRequest());

@@ -87,17 +82,6 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<UpdateRequest,
         return this;
     }

-    /**
-     * Explicitly specify the fields that will be returned. By default, nothing is returned.
-     * @deprecated Use {@link UpdateRequestBuilder#setFetchSource(String[], String[])} instead
-     */
-    @Deprecated
-    public UpdateRequestBuilder setFields(String... fields) {
-        DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
-        request.fields(fields);
-        return this;
-    }
-
     /**
      * Indicate that _source should be returned with every hit, with an
      * "include" and/or "exclude" set which can include simple wildcard
@@ -585,12 +585,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardComponent
             if (current.generation == location.generation) {
                 // no need to fsync here the read operation will ensure that buffers are written to disk
                 // if they are still in RAM and we are reading onto that position
-                try {
-                    return current.read(location);
-                } catch (final Exception ex) {
-                    closeOnTragicEvent(ex);
-                    throw ex;
-                }
+                return current.read(location);
             } else {
                 // read backwards - it's likely we need to read on that is recent
                 for (int i = readers.size() - 1; i >= 0; i--) {

@@ -600,6 +595,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardComponent
                     }
                 }
             }
+        } catch (final Exception ex) {
+            closeOnTragicEvent(ex);
+            throw ex;
         }
         return null;
     }
@@ -749,15 +747,28 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardComponent
         }
     }

+    /**
+     * Closes the translog if the current translog writer experienced a tragic exception.
+     *
+     * Note that in case this thread closes the translog it must not already be holding a read lock on the translog as it will acquire a
+     * write lock in the course of closing the translog
+     *
+     * @param ex if an exception occurs closing the translog, it will be suppressed into the provided exception
+     */
     private void closeOnTragicEvent(final Exception ex) {
+        // we can not hold a read lock here because closing will attempt to obtain a write lock and that would result in self-deadlock
+        assert readLock.isHeldByCurrentThread() == false : Thread.currentThread().getName();
         if (current.getTragicException() != null) {
             try {
                 close();
             } catch (final AlreadyClosedException inner) {
-                // don't do anything in this case. The AlreadyClosedException comes from TranslogWriter and we should not add it as suppressed because
-                // will contain the Exception ex as cause. See also https://github.com/elastic/elasticsearch/issues/15941
+                /*
+                 * Don't do anything in this case. The AlreadyClosedException comes from TranslogWriter and we should not add it as
+                 * suppressed because it will contain the provided exception as its cause. See also
+                 * https://github.com/elastic/elasticsearch/issues/15941.
+                 */
             } catch (final Exception inner) {
-                assert (ex != inner.getCause());
+                assert ex != inner.getCause();
                 ex.addSuppressed(inner);
             }
         }
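The assertion added above encodes a lock-ordering rule: `closeOnTragicEvent` ends up taking the translog's write lock, so it must never run while the calling thread holds a read lock, which is also why the `catch` around `current.read(location)` moved outside the read-locked block in the earlier `Translog#read` hunks. A plain-Java sketch of the hazard (the translog itself wraps a `ReentrantReadWriteLock` in `ReleasableLock`):

```java
import java.util.concurrent.locks.ReentrantReadWriteLock;

class SelfDeadlockSketch {
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

    void wrong() {
        lock.readLock().lock();
        try {
            // lock.writeLock().lock(); // would self-deadlock: a reader cannot
            //                          // upgrade to the write lock
        } finally {
            lock.readLock().unlock();
        }
    }

    void right() {
        // release the read lock first, then take the write lock to close
        lock.writeLock().lock();
        try {
            // close resources
        } finally {
            lock.writeLock().unlock();
        }
    }
}
```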
@@ -24,9 +24,6 @@ import org.elasticsearch.action.bulk.BulkShardRequest;
 import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.rest.BaseRestHandler;

@@ -50,8 +47,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT;
 * </pre>
 */
 public class RestBulkAction extends BaseRestHandler {
-    private static final DeprecationLogger DEPRECATION_LOGGER =
-        new DeprecationLogger(Loggers.getLogger(RestBulkAction.class));

     private final boolean allowExplicitIndex;

@@ -80,11 +75,6 @@ public class RestBulkAction extends BaseRestHandler {
         String defaultType = request.param("type", MapperService.SINGLE_MAPPING_NAME);
         String defaultRouting = request.param("routing");
         FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
-        String fieldsParam = request.param("fields");
-        if (fieldsParam != null) {
-            DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
-        }
-        String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null;
         String defaultPipeline = request.param("pipeline");
         String waitForActiveShards = request.param("wait_for_active_shards");
         if (waitForActiveShards != null) {

@@ -92,7 +82,7 @@ public class RestBulkAction extends BaseRestHandler {
         }
         bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
         bulkRequest.setRefreshPolicy(request.param("refresh"));
-        bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields,
+        bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting,
             defaultFetchSourceContext, defaultPipeline, null, allowExplicitIndex, request.getXContentType());

         return channel -> client.bulk(bulkRequest, new RestStatusToXContentListener<>(channel));
@@ -23,9 +23,6 @@ import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.rest.BaseRestHandler;

@@ -40,8 +37,6 @@ import java.io.IOException;
 import static org.elasticsearch.rest.RestRequest.Method.POST;

 public class RestUpdateAction extends BaseRestHandler {
-    private static final DeprecationLogger DEPRECATION_LOGGER =
-        new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class));

     public RestUpdateAction(Settings settings, RestController controller) {
         super(settings);

@@ -65,15 +60,7 @@ public class RestUpdateAction extends BaseRestHandler {
         }
         updateRequest.docAsUpsert(request.paramAsBoolean("doc_as_upsert", updateRequest.docAsUpsert()));
         FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
-        String sField = request.param("fields");
-        if (sField != null && fetchSourceContext != null) {
-            throw new IllegalArgumentException("[fields] and [_source] cannot be used in the same request");
-        }
-        if (sField != null) {
-            DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
-            String[] sFields = Strings.splitStringByCommaToArray(sField);
-            updateRequest.fields(sFields);
-        } else if (fetchSourceContext != null) {
+        if (fetchSourceContext != null) {
             updateRequest.fetchSource(fetchSourceContext);
         }
@@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
-import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregatorFactory;

@@ -50,7 +49,7 @@ import java.util.function.Consumer;
 public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric, PercentilesAggregationBuilder> {
     public static final String NAME = Percentiles.TYPE_NAME;

-    public static final double[] DEFAULT_PERCENTS = new double[] { 1, 5, 25, 50, 75, 95, 99 };
+    private static final double[] DEFAULT_PERCENTS = new double[] { 1, 5, 25, 50, 75, 95, 99 };
     public static final ParseField PERCENTS_FIELD = new ParseField("percents");
     public static final ParseField KEYED_FIELD = new ParseField("keyed");
     public static final ParseField METHOD_FIELD = new ParseField("method");
@@ -75,9 +75,9 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilder>
     /** the default number of fragments size in characters */
     public static final int DEFAULT_FRAGMENT_CHAR_SIZE = 100;
     /** the default opening tag */
-    public static final String[] DEFAULT_PRE_TAGS = new String[]{"<em>"};
+    static final String[] DEFAULT_PRE_TAGS = new String[]{"<em>"};
     /** the default closing tag */
-    public static final String[] DEFAULT_POST_TAGS = new String[]{"</em>"};
+    static final String[] DEFAULT_POST_TAGS = new String[]{"</em>"};

     /** the default opening tags when <tt>tag_schema = "styled"</tt> */
     public static final String[] DEFAULT_STYLED_PRE_TAG = {
@@ -94,12 +94,12 @@ public class BulkRequestTests extends ESTestCase {
         BulkRequest bulkRequest = new BulkRequest();
         bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON);
         assertThat(bulkRequest.numberOfActions(), equalTo(4));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
+        assertThat(bulkRequest.requests().get(0).id(), equalTo("1"));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().utf8ToString(), equalTo("{\"field\":\"value\"}"));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(1)).id(), equalTo("0"));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1"));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1"));
+        assertThat(bulkRequest.requests().get(1).id(), equalTo("0"));
+        assertThat(bulkRequest.requests().get(1).type(), equalTo("type1"));
+        assertThat(bulkRequest.requests().get(1).index(), equalTo("index1"));
         Script script = ((UpdateRequest) bulkRequest.requests().get(1)).script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("counter += param1"));

@@ -107,20 +107,18 @@ public class BulkRequestTests extends ESTestCase {
         Map<String, Object> scriptParams = script.getParams();
         assertThat(scriptParams, notNullValue());
         assertThat(scriptParams.size(), equalTo(1));
-        assertThat(((Integer) scriptParams.get("param1")), equalTo(1));
+        assertThat(scriptParams.get("param1"), equalTo(1));
         assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().utf8ToString(), equalTo("{\"counter\":1}"));
     }

     public void testBulkAllowExplicitIndex() throws Exception {
-        String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
-        try {
-            new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), null, null, false, XContentType.JSON);
-            fail();
-        } catch (Exception e) {
-
-        }
+        String bulkAction1 = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
+        Exception ex = expectThrows(Exception.class,
+            () -> new BulkRequest().add(
+                new BytesArray(bulkAction1.getBytes(StandardCharsets.UTF_8)), null, null, false, XContentType.JSON));
+        assertEquals("explicit index in bulk is not allowed", ex.getMessage());
+        String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");

-        bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");
         new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), "test", null, false, XContentType.JSON);
     }
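The test rewrites above replace the try/`fail()`/catch pattern with `ESTestCase#expectThrows`, which returns the exception so the message can be asserted too. A self-contained sketch of the idiom (`checkPositive` is a hypothetical helper):

```java
import org.elasticsearch.test.ESTestCase;

class ExpectThrowsSketch extends ESTestCase {
    public void testRejectsNegative() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> checkPositive(-1));
        assertEquals("value must be positive", e.getMessage());
    }

    private static int checkPositive(int value) {
        if (value < 0) {
            throw new IllegalArgumentException("value must be positive");
        }
        return value;
    }
}
```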
@@ -177,6 +175,16 @@ public class BulkRequestTests extends ESTestCase {
         assertThat(bulkRequest.numberOfActions(), equalTo(9));
     }

+    public void testBulkActionShouldNotContainArray() throws Exception {
+        String bulkAction = "{ \"index\":{\"_index\":[\"index1\", \"index2\"],\"_type\":\"type1\",\"_id\":\"1\"} }\r\n"
+            + "{ \"field1\" : \"value1\" }\r\n";
+        BulkRequest bulkRequest = new BulkRequest();
+        IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
+            () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON));
+        assertEquals(exc.getMessage(), "Malformed action/metadata line [1]" +
+            ", expected a simple value for field [_index] but found [START_ARRAY]");
+    }
+
     public void testBulkEmptyObject() throws Exception {
         String bulkIndexAction = "{ \"index\":{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"1\"} }\r\n";
         String bulkIndexSource = "{ \"field1\" : \"value1\" }\r\n";

@@ -299,7 +307,7 @@ public class BulkRequestTests extends ESTestCase {
             out.write(xContentType.xContent().streamSeparator());
             try(XContentBuilder builder = XContentFactory.contentBuilder(xContentType, out)) {
                 builder.startObject();
-                builder.field("doc", "{}");
+                builder.startObject("doc").endObject();
                 Map<String,Object> values = new HashMap<>();
                 values.put("version", 2L);
                 values.put("_index", "index");
@@ -260,13 +260,13 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
         assertThat(bulkResponse.getItems().length, equalTo(3));

         bulkResponse = client().prepareBulk()
-                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFields("field")
+                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFetchSource("field", null)
                         .setScript(new Script(
                                 ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap())))
-                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFields("field")
+                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFetchSource("field", null)
                         .setScript(new Script(
                                 ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap())))
-                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFields("field")
+                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFetchSource("field", null)
                         .setScript(new Script(
                                 ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap())))
                 .execute().actionGet();

@@ -279,7 +279,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {

         assertThat(bulkResponse.getItems()[1].getResponse().getId(), equalTo("2"));
         assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(2L));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field("field").getValue(), equalTo(2));
+        assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().sourceAsMap().get("field"), equalTo(2));
         assertThat(bulkResponse.getItems()[1].getFailure(), nullValue());

         assertThat(bulkResponse.getItems()[2].getFailure().getId(), equalTo("3"));

@@ -303,7 +303,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
             builder.add(
                     client().prepareUpdate()
                             .setIndex("test").setType("type1").setId(Integer.toString(i))
-                            .setFields("counter")
+                            .setFetchSource("counter", null)
                             .setScript(script)
                             .setUpsert(jsonBuilder().startObject().field("counter", 1).endObject()));
         }

@@ -319,7 +319,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
             assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
             assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i)));
             assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(1L));
-            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(1));
+            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(1));

             for (int j = 0; j < 5; j++) {
                 GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).execute()

@@ -333,7 +333,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
         builder = client().prepareBulk();
         for (int i = 0; i < numDocs; i++) {
             UpdateRequestBuilder updateBuilder = client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i))
-                    .setFields("counter");
+                    .setFetchSource("counter", null);
             if (i % 2 == 0) {
                 updateBuilder.setScript(script);
             } else {

@@ -357,7 +357,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
             assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
             assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i)));
             assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(2L));
-            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(2));
+            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(2));
         }

         builder = client().prepareBulk();
|
||||||
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
|
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
|
||||||
import static org.elasticsearch.script.MockScriptEngine.mockInlineScript;
|
import static org.elasticsearch.script.MockScriptEngine.mockInlineScript;
|
||||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
|
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
|
||||||
import static org.hamcrest.Matchers.arrayContaining;
|
|
||||||
import static org.hamcrest.Matchers.contains;
|
import static org.hamcrest.Matchers.contains;
|
||||||
import static org.hamcrest.Matchers.equalTo;
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
import static org.hamcrest.Matchers.instanceOf;
|
import static org.hamcrest.Matchers.instanceOf;
|
||||||
|
@@ -277,17 +276,26 @@ public class UpdateRequestTests extends ESTestCase {
         assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
     }

-    // Related to issue 15338
-    public void testFieldsParsing() throws Exception {
-        UpdateRequest request = new UpdateRequest("test", "type1", "1").fromXContent(
-                createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}")));
-        assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1"));
-        assertThat(request.fields(), arrayContaining("_source"));
+    public void testUnknownFieldParsing() throws Exception {
+        UpdateRequest request = new UpdateRequest("test", "type", "1");
+        XContentParser contentParser = createParser(XContentFactory.jsonBuilder()
+            .startObject()
+                .field("unknown_field", "test")
+            .endObject());

-        request = new UpdateRequest("test", "type2", "2").fromXContent(createParser(JsonXContent.jsonXContent,
-                new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}")));
-        assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2"));
-        assertThat(request.fields(), arrayContaining("field1", "field2"));
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> request.fromXContent(contentParser));
+        assertEquals("[UpdateRequest] unknown field [unknown_field], parser not found", ex.getMessage());
+
+        UpdateRequest request2 = new UpdateRequest("test", "type", "1");
+        XContentParser unknownObject = createParser(XContentFactory.jsonBuilder()
+            .startObject()
+                .field("script", "ctx.op = ctx._source.views == params.count ? 'delete' : 'none'")
+                .startObject("params")
+                    .field("count", 1)
+                .endObject()
+            .endObject());
+        ex = expectThrows(IllegalArgumentException.class, () -> request2.fromXContent(unknownObject));
+        assertEquals("[UpdateRequest] unknown field [params], parser not found", ex.getMessage());
     }

     public void testFetchSourceParsing() throws Exception {
@@ -444,13 +452,6 @@ public class UpdateRequestTests extends ESTestCase {
             BytesReference source = RandomObjects.randomSource(random(), xContentType);
             updateRequest.upsert(new IndexRequest().source(source, xContentType));
         }
-        if (randomBoolean()) {
-            String[] fields = new String[randomIntBetween(0, 5)];
-            for (int i = 0; i < fields.length; i++) {
-                fields[i] = randomAlphaOfLength(5);
-            }
-            updateRequest.fields(fields);
-        }
         if (randomBoolean()) {
             if (randomBoolean()) {
                 updateRequest.fetchSource(randomBoolean());

@@ -487,10 +488,8 @@ public class UpdateRequestTests extends ESTestCase {

         assertEquals(updateRequest.detectNoop(), parsedUpdateRequest.detectNoop());
         assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert());
-        assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert());
         assertEquals(updateRequest.script(), parsedUpdateRequest.script());
         assertEquals(updateRequest.scriptedUpsert(), parsedUpdateRequest.scriptedUpsert());
-        assertArrayEquals(updateRequest.fields(), parsedUpdateRequest.fields());
         assertEquals(updateRequest.fetchSource(), parsedUpdateRequest.fetchSource());

         BytesReference finalBytes = toXContent(parsedUpdateRequest, xContentType, humanReadable);
@@ -225,7 +225,7 @@ public class UpdateIT extends ESIntegTestCase {
         UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
                 .setDocAsUpsert(true)
-                .setFields("_source")
+                .setFetchSource(true)
                 .execute().actionGet();
         assertThat(updateResponse.getIndex(), equalTo("test"));
         assertThat(updateResponse.getGetResult(), notNullValue());
@@ -241,7 +241,7 @@ public class UpdateIT extends ESIntegTestCase {
         assertThrows(client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
                 .setDocAsUpsert(false)
-                .setFields("_source")
+                .setFetchSource(true)
                 .execute(), DocumentMissingException.class);
     }
 
@@ -264,7 +264,7 @@ public class UpdateIT extends ESIntegTestCase {
         updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
                 .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo")))
-                .setFields("_source")
+                .setFetchSource(true)
                 .execute().actionGet();
 
         assertThat(updateResponse.getIndex(), equalTo("test"));
@@ -293,12 +293,9 @@ public class UpdateIT extends ESIntegTestCase {
         ensureGreen();
 
         Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field"));
-        try {
-            client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet();
-            fail();
-        } catch (DocumentMissingException e) {
-            // all is well
-        }
+        DocumentMissingException ex = expectThrows(DocumentMissingException.class,
+                () -> client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet());
+        assertEquals("[type1][1]: document missing", ex.getMessage());
 
         client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
 
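The hunk above trades the try/fail/catch idiom for `expectThrows`, which both asserts that the lambda throws and returns the caught exception, so the message check collapses into a single assertEquals. A hedged sketch of the general pattern; the document id here is illustrative, not taken from the change:

    // Illustrative sketch: expectThrows runs the action, verifies the
    // exception type, and hands the instance back for message assertions.
    DocumentMissingException e = expectThrows(DocumentMissingException.class,
            () -> client().prepareUpdate("test", "type1", "42").setScript(fieldIncScript).get());
    assertEquals("[type1][42]: document missing", e.getMessage());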
@@ -353,19 +350,6 @@ public class UpdateIT extends ESIntegTestCase {
             assertThat(getResponse.isExists(), equalTo(false));
         }
 
-        // check fields parameter
-        client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
-        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
-                .setScript(fieldIncScript)
-                .setFields("field")
-                .setFetchSource(true)
-                .execute().actionGet();
-        assertThat(updateResponse.getIndex(), equalTo("test"));
-        assertThat(updateResponse.getGetResult(), notNullValue());
-        assertThat(updateResponse.getGetResult().getIndex(), equalTo("test"));
-        assertThat(updateResponse.getGetResult().sourceRef(), notNullValue());
-        assertThat(updateResponse.getGetResult().field("field").getValue(), notNullValue());
-
         // check _source parameter
         client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet();
         updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
@@ -383,7 +367,7 @@ public class UpdateIT extends ESIntegTestCase {
         // check updates without script
         // add new field
         client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
-        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet();
+        client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet();
         for (int i = 0; i < 5; i++) {
             GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
             assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1"));
@@ -391,7 +375,7 @@ public class UpdateIT extends ESIntegTestCase {
         }
 
         // change existing field
-        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet();
+        client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet();
         for (int i = 0; i < 5; i++) {
             GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
             assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("3"));
@@ -409,7 +393,7 @@ public class UpdateIT extends ESIntegTestCase {
         testMap.put("map1", 8);
 
         client().prepareIndex("test", "type1", "1").setSource("map", testMap).execute().actionGet();
-        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet();
+        client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet();
         for (int i = 0; i < 5; i++) {
             GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
             Map map1 = (Map) getResponse.getSourceAsMap().get("map");
@@ -581,7 +565,7 @@ public class UpdateIT extends ESIntegTestCase {
                     assertThat(response.getId(), equalTo(Integer.toString(i)));
                     assertThat(response.isExists(), equalTo(true));
                     assertThat(response.getVersion(), equalTo((long) numberOfThreads));
-                    assertThat((Integer) response.getSource().get("field"), equalTo(numberOfThreads));
+                    assertThat(response.getSource().get("field"), equalTo(numberOfThreads));
                 }
             }
 

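Across UpdateIT and UpdateNoopIT the deprecated `setFields("_source")` call gives way to `setFetchSource(true)`, matching the request-level `fetchSource` round-trip asserted in UpdateRequestTests above. A hedged sketch of the migrated call shape; the `sourceAsMap()` read-back is assumed from GetResult's API rather than shown in this diff:

    // Sketch of the migrated pattern: fetch the updated source via
    // setFetchSource(true) instead of the removed setFields("_source").
    UpdateResponse response = client().prepareUpdate("test", "type1", "1")
            .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
            .setDocAsUpsert(true)
            .setFetchSource(true)
            .get();
    Map<String, Object> source = response.getGetResult().sourceAsMap();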
@@ -248,7 +248,7 @@ public class UpdateNoopIT extends ESIntegTestCase {
         UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "type1", "1")
                 .setDoc(xContentBuilder)
                 .setDocAsUpsert(true)
-                .setFields("_source");
+                .setFetchSource(true);
         if (detectNoop != null) {
             updateRequest.setDetectNoop(detectNoop);
         }