Merge branch 'master' into ccr
* master:
  [TEST] REST client request without leading '/' (#29471)
  Using ObjectParser in UpdateRequest (#29293)
  Prevent accidental changes of default values (#29528)
  [Docs] Add definitions to glossary (#29127)
  Avoid self-deadlock in the translog (#29520)
  Minor cleanup in NodeInfo.groovy
  Lazy configure build tasks that require older JDKs (#29519)
  Simplify snapshot check in root build file
  Make NodeInfo#nodeVersion strongly-typed as Version (#29515)
  Enable license header exclusions (#29379)
  Use proper Java version for BWC builds (#29493)
  Mute TranslogTests#testFatalIOExceptionsWhileWritingConcurrently
  Enable skipping fetching latest for BWC builds (#29497)
commit ed427953d6
@@ -498,6 +498,13 @@ will contain your change.
 . Push both branches to your remote repository.

 . Run the tests with `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x`.

+== Skip fetching latest
+
+For some BWC testing scenarios, you want to use the local clone of the
+repository without fetching latest. For these use cases, you can set the system
+property `tests.bwc.git_fetch_latest` to `false` and the BWC builds will skip
+fetching the latest from the remote.
+
 == Test coverage analysis

 Generating test coverage reports for Elasticsearch is currently not possible through Gradle.

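As a worked example combining the two properties documented above (the combination itself is hypothetical; both property names come from this commit): `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x -Dtests.bwc.git_fetch_latest=false` runs the BWC checks entirely against the local clone, without fetching from the remote.
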
@@ -36,7 +36,7 @@ import java.security.MessageDigest
 // common maven publishing configuration
 subprojects {
   group = 'org.elasticsearch'
-  version = VersionProperties.elasticsearch
+  version = VersionProperties.elasticsearch.toString()
   description = "Elasticsearch subproject ${project.path}"
 }

@@ -80,7 +80,7 @@ configure(subprojects.findAll { it.projectDir.toPath().startsWith(rootPath) }) {
    * in a branch if there are only betas and rcs in the branch so we have
    * *something* to test against. */
   VersionCollection versions = new VersionCollection(file('server/src/main/java/org/elasticsearch/Version.java').readLines('UTF-8'))
-  if (versions.currentVersion.toString() != VersionProperties.elasticsearch) {
+  if (versions.currentVersion != VersionProperties.elasticsearch) {
     throw new GradleException("The last version in Versions.java [${versions.currentVersion}] does not match " +
         "VersionProperties.elasticsearch [${VersionProperties.elasticsearch}]")
   }

@@ -245,7 +245,7 @@ subprojects {
   // other packages (e.g org.elasticsearch.client) will point to server rather than
   // their own artifacts.
   if (project.plugins.hasPlugin(BuildPlugin)) {
-    String artifactsHost = VersionProperties.elasticsearch.endsWith("-SNAPSHOT") ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
+    String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
     Closure sortClosure = { a, b -> b.group <=> a.group }
     Closure depJavadocClosure = { dep ->
       if (dep.group != null && dep.group.startsWith('org.elasticsearch')) {

@@ -97,6 +97,12 @@ class BuildPlugin implements Plugin<Project> {
         String compilerJavaHome = findCompilerJavaHome()
         String runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome)
         File gradleJavaHome = Jvm.current().javaHome
+
+        final Map<Integer, String> javaVersions = [:]
+        for (int version = 7; version <= Integer.parseInt(minimumCompilerVersion.majorVersion); version++) {
+            javaVersions.put(version, findJavaHome(version));
+        }
+
         String javaVendor = System.getProperty('java.vendor')
         String javaVersion = System.getProperty('java.version')
         String gradleJavaVersionDetails = "${javaVendor} ${javaVersion}" +

@@ -158,10 +164,32 @@ class BuildPlugin implements Plugin<Project> {
                 throw new GradleException(message)
             }

+            for (final Map.Entry<Integer, String> javaVersionEntry : javaVersions.entrySet()) {
+                final String javaHome = javaVersionEntry.getValue()
+                if (javaHome == null) {
+                    continue
+                }
+                JavaVersion javaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, javaHome))
+                final JavaVersion expectedJavaVersionEnum
+                final int version = javaVersionEntry.getKey()
+                if (version < 9) {
+                    expectedJavaVersionEnum = JavaVersion.toVersion("1." + version)
+                } else {
+                    expectedJavaVersionEnum = JavaVersion.toVersion(Integer.toString(version))
+                }
+                if (javaVersionEnum != expectedJavaVersionEnum) {
+                    final String message =
+                            "the environment variable JAVA" + version + "_HOME must be set to a JDK installation directory for Java" +
+                                    " ${expectedJavaVersionEnum} but is [${javaHome}] corresponding to [${javaVersionEnum}]"
+                    throw new GradleException(message)
+                }
+            }
+
             project.rootProject.ext.compilerJavaHome = compilerJavaHome
             project.rootProject.ext.runtimeJavaHome = runtimeJavaHome
             project.rootProject.ext.compilerJavaVersion = compilerJavaVersionEnum
             project.rootProject.ext.runtimeJavaVersion = runtimeJavaVersionEnum
+            project.rootProject.ext.javaVersions = javaVersions
             project.rootProject.ext.buildChecksDone = true
         }

@@ -173,6 +201,7 @@ class BuildPlugin implements Plugin<Project> {
         project.ext.runtimeJavaHome = project.rootProject.ext.runtimeJavaHome
         project.ext.compilerJavaVersion = project.rootProject.ext.compilerJavaVersion
         project.ext.runtimeJavaVersion = project.rootProject.ext.runtimeJavaVersion
+        project.ext.javaVersions = project.rootProject.ext.javaVersions
     }

     private static String findCompilerJavaHome() {

@@ -188,6 +217,27 @@ class BuildPlugin implements Plugin<Project> {
         return javaHome
     }

+    private static String findJavaHome(int version) {
+        return System.getenv('JAVA' + version + '_HOME')
+    }
+
+    /**
+     * Get Java home for the project for the specified version. If the specified version is not configured, an exception with the specified
+     * message is thrown.
+     *
+     * @param project the project
+     * @param version the version of Java home to obtain
+     * @param message the exception message if Java home for the specified version is not configured
+     * @return Java home for the specified version
+     * @throws GradleException if Java home for the specified version is not configured
+     */
+    static String getJavaHome(final Project project, final int version, final String message) {
+        if (project.javaVersions.get(version) == null) {
+            throw new GradleException(message)
+        }
+        return project.javaVersions.get(version)
+    }
+
     private static String findRuntimeJavaHome(final String compilerJavaHome) {
         assert compilerJavaHome != null
         return System.getenv('RUNTIME_JAVA_HOME') ?: compilerJavaHome

@@ -517,17 +567,18 @@ class BuildPlugin implements Plugin<Project> {
         jarTask.destinationDir = new File(project.buildDir, 'distributions')
         // fixup the jar manifest
         jarTask.doFirst {
-            boolean isSnapshot = VersionProperties.elasticsearch.endsWith("-SNAPSHOT");
-            String version = VersionProperties.elasticsearch;
-            if (isSnapshot) {
-                version = version.substring(0, version.length() - 9)
-            }
+            final Version versionWithoutSnapshot = new Version(
+                    VersionProperties.elasticsearch.major,
+                    VersionProperties.elasticsearch.minor,
+                    VersionProperties.elasticsearch.revision,
+                    VersionProperties.elasticsearch.suffix,
+                    false)
             // this doFirst is added before the info plugin, therefore it will run
             // after the doFirst added by the info plugin, and we can override attributes
             jarTask.manifest.attributes(
-                    'X-Compile-Elasticsearch-Version': version,
+                    'X-Compile-Elasticsearch-Version': versionWithoutSnapshot,
                     'X-Compile-Lucene-Version': VersionProperties.lucene,
-                    'X-Compile-Elasticsearch-Snapshot': isSnapshot,
+                    'X-Compile-Elasticsearch-Snapshot': VersionProperties.elasticsearch.isSnapshot(),
                     'Build-Date': ZonedDateTime.now(ZoneOffset.UTC),
                     'Build-Java-Version': project.compilerJavaVersion)
             if (jarTask.manifest.attributes.containsKey('Change') == false) {

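For orientation, a minimal sketch of how a build script consumes the new `getJavaHome` helper; the task name is invented, and the project is assumed to have `BuildPlugin` applied so that `project.javaVersions` is populated:

    import static org.elasticsearch.gradle.BuildPlugin.getJavaHome

    // Hypothetical task: the lazy GString defers resolution, so the build only
    // fails with the supplied message if the task actually runs without JAVA8_HOME.
    task printJdk8Home {
        doLast {
            println getJavaHome(project, 8, 'JAVA8_HOME must be set to run printJdk8Home')
        }
    }

The same lazy-resolution pattern (`"${-> getJavaHome(...)}"`) appears in the BWC and reindex-from-old build scripts later in this commit.
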
@@ -74,20 +74,36 @@ public class Version {
         return "${major}.${minor}.${revision}${suffix}${snapshotStr}"
     }

+    public boolean before(Version compareTo) {
+        return id < compareTo.id
+    }
+
     public boolean before(String compareTo) {
-        return id < fromString(compareTo).id
+        return before(fromString(compareTo))
     }

+    public boolean onOrBefore(Version compareTo) {
+        return id <= compareTo.id
+    }
+
     public boolean onOrBefore(String compareTo) {
-        return id <= fromString(compareTo).id
+        return onOrBefore(fromString(compareTo))
     }

+    public boolean onOrAfter(Version compareTo) {
+        return id >= compareTo.id
+    }
+
     public boolean onOrAfter(String compareTo) {
-        return id >= fromString(compareTo).id
+        return onOrAfter(fromString(compareTo))
     }

+    public boolean after(Version compareTo) {
+        return id > compareTo.id
+    }
+
     public boolean after(String compareTo) {
-        return id > fromString(compareTo).id
+        return after(fromString(compareTo))
     }

     public boolean onOrBeforeIncludingSuffix(Version otherVersion) {

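The new `Version`-typed overloads keep the `fromString` parsing in one place while the `String` overloads delegate to them; a small illustrative check (the version literals are arbitrary):

    import org.elasticsearch.gradle.Version

    Version v = Version.fromString('6.2.3')
    assert v.onOrAfter('6.2.0')   // String overload delegates to onOrAfter(Version)
    assert v.before('6.3.0')
    assert v.after(Version.fromString('6.1.0'))

This is the comparison pattern NodeInfo.groovy uses later in this commit to choose a JDK for BWC nodes.
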
@@ -22,7 +22,7 @@ package org.elasticsearch.gradle
  * Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions.
  */
 class VersionProperties {
-    static final String elasticsearch
+    static final Version elasticsearch
     static final String lucene
     static final Map<String, String> versions = new HashMap<>()
     static {

@@ -32,7 +32,7 @@ class VersionProperties {
             throw new RuntimeException('/version.properties resource missing')
         }
         props.load(propsStream)
-        elasticsearch = props.getProperty('elasticsearch')
+        elasticsearch = Version.fromString(props.getProperty('elasticsearch'))
         lucene = props.getProperty('lucene')
         for (String property : props.stringPropertyNames()) {
             versions.put(property, props.getProperty(property))

@@ -41,7 +41,7 @@ public class DocsTestPlugin extends RestTestPlugin {
            * to the version being built for testing but needs to resolve to
            * the last released version for docs. */
             '\\{version\\}':
-                VersionProperties.elasticsearch.replace('-SNAPSHOT', ''),
+                VersionProperties.elasticsearch.toString().replace('-SNAPSHOT', ''),
             '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''),
         ]
         Task listSnippets = project.tasks.create('listSnippets', SnippetsTask)

@@ -77,7 +77,7 @@ class PluginPropertiesTask extends Copy {
             'name': extension.name,
             'description': extension.description,
             'version': stringSnap(extension.version),
-            'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch),
+            'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch.toString()),
             'javaVersion': project.targetCompatibility as String,
             'classname': extension.classname,
             'extendedPlugins': extension.extendedPlugins.join(','),

@@ -49,6 +49,14 @@ public class LicenseHeadersTask extends AntTask {
     @Input
     List<String> approvedLicenses = ['Apache', 'Generated']

+    /**
+     * Files that should be excluded from the license header check. Use with extreme care, only in situations where the license on the
+     * source file is compatible with the codebase but we do not want to add the license to the list of approved headers (to avoid the
+     * possibility of inadvertently using the license on our own source files).
+     */
+    @Input
+    List<String> excludes = []
+
     /**
      * Additional license families that may be found. The key is the license category name (5 characters),
      * followed by the family name and the value list of patterns to search for.

@@ -95,7 +103,7 @@ public class LicenseHeadersTask extends AntTask {
             for (File dir: dirSet.srcDirs) {
                 // sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main...
                 if (dir.exists()) {
-                    ant.fileset(dir: dir)
+                    ant.fileset(dir: dir, excludes: excludes.join(' '))
                 }
             }
         }

@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.gradle.test

+import org.elasticsearch.gradle.Version
 import org.gradle.api.GradleException
 import org.gradle.api.Project
 import org.gradle.api.tasks.Input

@@ -37,7 +38,7 @@ class ClusterConfiguration {
     int numBwcNodes = 0

     @Input
-    String bwcVersion = null
+    Version bwcVersion = null

     @Input
     int httpPort = 0

@@ -107,11 +107,14 @@ class ClusterFormationTasks {
         for (int i = 0; i < config.numNodes; i++) {
             // we start N nodes and out of these N nodes there might be M bwc nodes.
             // for each of those nodes we might have a different configuration
-            String elasticsearchVersion = VersionProperties.elasticsearch
-            Configuration distro = currentDistro
+            final Configuration distro
+            final Version elasticsearchVersion
             if (i < config.numBwcNodes) {
                 elasticsearchVersion = config.bwcVersion
                 distro = bwcDistro
+            } else {
+                elasticsearchVersion = VersionProperties.elasticsearch
+                distro = currentDistro
             }
             NodeInfo node = new NodeInfo(config, i, project, prefix, elasticsearchVersion, sharedDir)
             nodes.add(node)

@@ -126,7 +129,7 @@ class ClusterFormationTasks {
     }

     /** Adds a dependency on the given distribution */
-    static void configureDistributionDependency(Project project, String distro, Configuration configuration, String elasticsearchVersion) {
+    static void configureDistributionDependency(Project project, String distro, Configuration configuration, Version elasticsearchVersion) {
         String packaging = distro
         if (distro == 'tar') {
             packaging = 'tar.gz'

@@ -137,7 +140,7 @@ class ClusterFormationTasks {
     }

     /** Adds a dependency on a different version of the given plugin, which will be retrieved using gradle's dependency resolution */
-    static void configureBwcPluginDependency(String name, Project project, Project pluginProject, Configuration configuration, String elasticsearchVersion) {
+    static void configureBwcPluginDependency(String name, Project project, Project pluginProject, Configuration configuration, Version elasticsearchVersion) {
         verifyProjectHasBuildPlugin(name, elasticsearchVersion, project, pluginProject)
         final String pluginName = findPluginName(pluginProject)
         project.dependencies.add(configuration.name, "org.elasticsearch.plugin:${pluginName}:${elasticsearchVersion}@zip")

@@ -303,7 +306,7 @@ class ClusterFormationTasks {
         // Default the watermarks to absurdly low to prevent the tests from failing on nodes without enough disk space
         esConfig['cluster.routing.allocation.disk.watermark.low'] = '1b'
         esConfig['cluster.routing.allocation.disk.watermark.high'] = '1b'
-        if (Version.fromString(node.nodeVersion).major >= 6) {
+        if (node.nodeVersion.major >= 6) {
             esConfig['cluster.routing.allocation.disk.watermark.flood_stage'] = '1b'
         }
         // increase script compilation limit since tests can rapid-fire script compilations

@@ -803,7 +806,7 @@ class ClusterFormationTasks {
         return retVal
     }

-    static void verifyProjectHasBuildPlugin(String name, String version, Project project, Project pluginProject) {
+    static void verifyProjectHasBuildPlugin(String name, Version version, Project project, Project pluginProject) {
         if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false && pluginProject.plugins.hasPlugin(MetaPluginBuildPlugin) == false) {
             throw new GradleException("Task [${name}] cannot add plugin [${pluginProject.path}] with version [${version}] to project's " +
                     "[${project.path}] dependencies: the plugin is not an esplugin or es_meta_plugin")

@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.elasticsearch.gradle.test

 import com.sun.jna.Native

@@ -29,6 +30,8 @@ import java.nio.file.Files
 import java.nio.file.Path
 import java.nio.file.Paths

+import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
+
 /**
  * A container for the files and configuration associated with a single node in a test cluster.
  */

@@ -100,10 +103,10 @@ class NodeInfo {
     ByteArrayOutputStream buffer = new ByteArrayOutputStream()

     /** the version of elasticsearch that this node runs */
-    String nodeVersion
+    Version nodeVersion

     /** Holds node configuration for part of a test cluster. */
-    NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, String nodeVersion, File sharedDir) {
+    NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, Version nodeVersion, File sharedDir) {
         this.config = config
         this.nodeNum = nodeNum
         this.sharedDir = sharedDir

@@ -162,7 +165,14 @@ class NodeInfo {
             args.add("${esScript}")
         }

-        env = ['JAVA_HOME': project.runtimeJavaHome]
+        if (nodeVersion.before("6.2.0")) {
+            env = ['JAVA_HOME': "${-> getJavaHome(project, 8, "JAVA8_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"]
+        } else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) {
+            env = ['JAVA_HOME': "${-> getJavaHome(project, 9, "JAVA9_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"]
+        } else {
+            env = ['JAVA_HOME': (String) project.runtimeJavaHome]
+        }

         args.addAll("-E", "node.portsfile=true")
         String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
         String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs

@@ -284,7 +294,7 @@ class NodeInfo {
     }

     /** Returns the directory elasticsearch home is contained in for the given distribution */
-    static File homeDir(File baseDir, String distro, String nodeVersion) {
+    static File homeDir(File baseDir, String distro, Version nodeVersion) {
         String path
         switch (distro) {
             case 'integ-test-zip':

@@ -302,7 +312,7 @@ class NodeInfo {
         return new File(baseDir, path)
     }

-    static File pathConf(File baseDir, String distro, String nodeVersion) {
+    static File pathConf(File baseDir, String distro, Version nodeVersion) {
         switch (distro) {
             case 'integ-test-zip':
             case 'zip':

@@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.shard.ShardId;

@@ -68,9 +67,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
         String defaultIndex = request.param("index");
         String defaultType = request.param("type");
         String defaultRouting = request.param("routing");
-        String fieldsParam = request.param("fields");
         String defaultPipeline = request.param("pipeline");
-        String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null;

         String waitForActiveShards = request.param("wait_for_active_shards");
         if (waitForActiveShards != null) {

@@ -78,7 +75,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
         }
         bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
         bulkRequest.setRefreshPolicy(request.param("refresh"));
-        bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields,
+        bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting,
                 null, defaultPipeline, null, true, request.getXContentType());

         // short circuit the call to the transport layer

@@ -58,6 +58,7 @@ import static org.hamcrest.Matchers.startsWith;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;

 /**
  * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}.

@@ -135,8 +136,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
         final RestClientBuilder restClientBuilder = RestClient.builder(
             new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())).setDefaultHeaders(defaultHeaders);
         if (pathPrefix.length() > 0) {
-            // sometimes cut off the leading slash
-            restClientBuilder.setPathPrefix(randomBoolean() ? pathPrefix.substring(1) : pathPrefix);
+            restClientBuilder.setPathPrefix(pathPrefix);
         }

         if (useAuth) {

@@ -281,6 +281,33 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
         }
     }

+    public void testUrlWithoutLeadingSlash() throws Exception {
+        if (pathPrefix.length() == 0) {
+            try {
+                restClient.performRequest("GET", "200");
+                fail("request should have failed");
+            } catch(ResponseException e) {
+                assertEquals(404, e.getResponse().getStatusLine().getStatusCode());
+            }
+        } else {
+            {
+                Response response = restClient.performRequest("GET", "200");
+                //a trailing slash gets automatically added if a pathPrefix is configured
+                assertEquals(200, response.getStatusLine().getStatusCode());
+            }
+            {
+                //pathPrefix is not required to start with '/', will be added automatically
+                try (RestClient restClient = RestClient.builder(
+                    new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()))
+                    .setPathPrefix(pathPrefix.substring(1)).build()) {
+                    Response response = restClient.performRequest("GET", "200");
+                    //a trailing slash gets automatically added if a pathPrefix is configured
+                    assertEquals(200, response.getStatusLine().getStatusCode());
+                }
+            }
+        }
+    }
+
     private Response bodyTest(final String method) throws IOException {
         return bodyTest(restClient, method);
     }

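A rough sketch of the client behavior the new test pins down (host and prefix values are made up; the calls mirror those in the test above):

    // Both the path prefix and the endpoint may omit the leading '/';
    // the client normalizes the prefix, so this request is served under it.
    RestClient client = RestClient.builder(new HttpHost("localhost", 9200))
            .setPathPrefix("prefix")
            .build();
    Response response = client.performRequest("GET", "200");
    client.close();

Without a configured prefix, the same slash-less endpoint does not resolve to `/200`, which is why the test expects a 404 in that branch.
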
@@ -17,11 +17,12 @@
  * under the License.
  */

 import org.apache.tools.ant.taskdefs.condition.Os
 import org.elasticsearch.gradle.LoggedExec
 import org.elasticsearch.gradle.Version

 import java.util.regex.Matcher
+import static org.elasticsearch.gradle.BuildPlugin.getJavaHome

 /**
  * This is a dummy project which does a local checkout of the previous

@@ -54,6 +55,16 @@ subprojects {

   final String remote = System.getProperty("tests.bwc.remote", "elastic")

+  final boolean gitFetchLatest
+  final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true")
+  if ("true".equals(gitFetchLatestProperty)) {
+    gitFetchLatest = true
+  } else if ("false".equals(gitFetchLatestProperty)) {
+    gitFetchLatest = false
+  } else {
+    throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]")
+  }
+
   task createClone(type: LoggedExec) {
     onlyIf { checkoutDir.exists() == false }
     commandLine = ['git', 'clone', rootDir, checkoutDir]

@@ -83,7 +94,7 @@ subprojects {
   }

   task fetchLatest(type: LoggedExec) {
-    onlyIf { project.gradle.startParameter.isOffline() == false }
+    onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest }
     dependsOn addRemote
     workingDir = checkoutDir
     commandLine = ['git', 'fetch', '--all']

@@ -134,12 +145,13 @@ subprojects {
   task buildBwcVersion(type: Exec) {
     dependsOn checkoutBwcBranch, writeBuildMetadata
     workingDir = checkoutDir
-    if (project.rootProject.ext.runtimeJavaVersion == JavaVersion.VERSION_1_8 && ["5.6", "6.0", "6.1"].contains(bwcBranch)) {
-      /*
-       * If runtime Java home is set to JDK 8 and we are building branches that are officially built with JDK 8, push this to JAVA_HOME for
-       * these builds.
-       */
-      environment('JAVA_HOME', System.getenv('RUNTIME_JAVA_HOME'))
+    if (["5.6", "6.0", "6.1"].contains(bwcBranch)) {
+      // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds
+      environment('JAVA_HOME', "${-> getJavaHome(project, 8, "JAVA8_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]")}")
+    } else if ("6.2".equals(bwcBranch)) {
+      environment('JAVA_HOME', "${-> getJavaHome(project, 9, "JAVA9_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]")}")
+    } else {
+      environment('JAVA_HOME', project.compilerJavaHome)
     }
     if (Os.isFamily(Os.FAMILY_WINDOWS)) {
       executable 'cmd'

@@ -61,6 +61,15 @@
 `object`. The mapping also allows you to define (amongst other things)
 how the value for a field should be analyzed.

+[[glossary-filter]] filter ::
+
+A filter is a non-scoring <<glossary-query,query>>, meaning that it does not score documents.
+It is only concerned about answering the question - "Does this document match?".
+The answer is always a simple, binary yes or no. This kind of query is said to be made
+in a <<query-filter-context,filter context>>,
+hence it is called a filter. Filters are simple checks for set inclusion or exclusion.
+In most cases, the goal of filtering is to reduce the number of documents that have to be examined.
+
 [[glossary-index]] index ::

 An index is like a _table_ in a relational database. It has a

@@ -105,6 +114,16 @@
 +
 See also <<glossary-routing,routing>>

+[[glossary-query]] query ::
+
+A query is the basic component of a search. A search can be defined by one or more queries
+which can be mixed and matched in endless combinations. While <<glossary-filter,filters>> are
+queries that only determine if a document matches, those queries that also calculate how well
+the document matches are known as "scoring queries". Those queries assign it a score, which is
+later used to sort matched documents. Scoring queries take more resources than <<glossary-filter,non scoring queries>>
+and their query results are not cacheable. As a general rule, use query clauses for full-text
+search or for any condition that requires scoring, and use filters for everything else.
+
 [[glossary-replica-shard]] replica shard ::

 Each <<glossary-primary-shard,primary shard>> can have zero or more

@@ -161,7 +180,8 @@
 A term is an exact value that is indexed in Elasticsearch. The terms
 `foo`, `Foo`, `FOO` are NOT equivalent. Terms (i.e. exact values) can
-be searched for using _term_ queries. +
+be searched for using _term_ queries.
++
 See also <<glossary-text,text>> and <<glossary-analysis,analysis>>.

 [[glossary-text]] text ::

@@ -22,3 +22,7 @@ The following parameters starting with underscore have been removed:
 Instead of these removed parameters, use their non camel case equivalents without
 starting underscore, e.g. use `version_type` instead of `_version_type` or `versionType`.
+
+==== The parameter `fields` deprecated in 6.x has been removed from Bulk request
+and Update request. The Update API returns `400 - Bad request` if request contains
+unknown parameters (instead of ignored in the previous version).

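In client code the replacement is mechanical; a hedged before/after sketch against the Java API (index, type, id, and field names are invented):

    // Before (removed): new UpdateRequest("index", "type", "1").fields("title")
    // After: request the same values through source filtering.
    UpdateRequest request = new UpdateRequest("index", "type", "1")
            .doc("title", "foo")
            .fetchSource(new String[] { "title" }, Strings.EMPTY_ARRAY);

The requested values then arrive through the response's `GetResult` source rather than a separate `fields` map, as the `UpdateHelper.extractGetResult` change later in this commit reflects.
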
@@ -24,8 +24,11 @@ should be able to use the standard launching mechanism which
 is more flexible and reliable.
 """

 import org.apache.tools.ant.taskdefs.condition.Os
+
+import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
+
 apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

@@ -51,9 +54,8 @@ dependencies {
   es090 'org.elasticsearch:elasticsearch:0.90.13@zip'
 }

-if (project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 || Os.isFamily(Os.FAMILY_WINDOWS)) {
-  /* We can't run the dependencies with Java 9 so for now we'll skip the whole
-   * thing. We can't get the pid files in windows so we skip that as well.... */
+if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+  // we can't get the pid files in windows so we skip that
   integTest.enabled = false
 } else {
   /* Set up tasks to unzip and run the old versions of ES before running the

@@ -75,7 +77,7 @@ if (project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 || Os.isFamily(Os.FAMILY_WINDOWS)) {
     dependsOn unzip
     executable = new File(project.runtimeJavaHome, 'bin/java')
     env 'CLASSPATH', "${ -> project.configurations.oldesFixture.asPath }"
-    env 'JAVA_HOME', project.runtimeJavaHome
+    env 'JAVA_HOME', "${-> getJavaHome(project, 7, "JAVA7_HOME must be set to run reindex-from-old")}"
     args 'oldes.OldElasticsearch',
         baseDir,
         unzip.temporaryDir,

@@ -37,10 +37,6 @@
       "type" : "string",
       "description" : "Default document type for items which don't provide one"
     },
-    "fields": {
-      "type": "list",
-      "description" : "Default comma-separated list of fields to return in the response for updates, can be overridden on each sub-request"
-    },
     "_source": {
       "type" : "list",
       "description" : "True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub-request"

@@ -27,10 +27,6 @@
       "type": "string",
       "description": "Sets the number of shard copies that must be active before proceeding with the update operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)"
     },
-    "fields": {
-      "type": "list",
-      "description": "A comma-separated list of fields to return in the response"
-    },
     "_source": {
       "type" : "list",
       "description" : "True or false to return the _source field or not, or a list of fields to return"

@@ -299,7 +299,7 @@ public class BulkProcessor implements Closeable {
      */
     public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
                                           @Nullable String defaultPipeline, @Nullable Object payload, XContentType xContentType) throws Exception {
-        bulkRequest.add(data, defaultIndex, defaultType, null, null, null, defaultPipeline, payload, true, xContentType);
+        bulkRequest.add(data, defaultIndex, defaultType, null, null, defaultPipeline, payload, true, xContentType);
         executeIfNeeded();
         return this;
     }

@@ -36,8 +36,6 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;

@@ -66,8 +64,6 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  * @see org.elasticsearch.client.Client#bulk(BulkRequest)
  */
 public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
-    private static final DeprecationLogger DEPRECATION_LOGGER =
-        new DeprecationLogger(Loggers.getLogger(BulkRequest.class));

     private static final int REQUEST_OVERHEAD = 50;

@@ -80,7 +76,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
     private static final ParseField VERSION_TYPE = new ParseField("version_type");
     private static final ParseField RETRY_ON_CONFLICT = new ParseField("retry_on_conflict");
     private static final ParseField PIPELINE = new ParseField("pipeline");
-    private static final ParseField FIELDS = new ParseField("fields");
     private static final ParseField SOURCE = new ParseField("_source");

     /**

@@ -277,7 +272,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
      */
     public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
                            XContentType xContentType) throws IOException {
-        return add(data, defaultIndex, defaultType, null, null, null, null, null, true, xContentType);
+        return add(data, defaultIndex, defaultType, null, null, null, null, true, xContentType);
     }

     /**

@@ -285,12 +280,13 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
      */
     public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex,
                            XContentType xContentType) throws IOException {
-        return add(data, defaultIndex, defaultType, null, null, null, null, null, allowExplicitIndex, xContentType);
+        return add(data, defaultIndex, defaultType, null, null, null, null, allowExplicitIndex, xContentType);
     }

-    public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String
-            defaultRouting, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String
-            defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex, XContentType xContentType) throws IOException {
+    public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
+                           @Nullable String defaultRouting, @Nullable FetchSourceContext defaultFetchSourceContext,
+                           @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex,
+                           XContentType xContentType) throws IOException {
         XContent xContent = xContentType.xContent();
         int line = 0;
         int from = 0;

@@ -333,7 +329,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
         String id = null;
         String routing = defaultRouting;
         FetchSourceContext fetchSourceContext = defaultFetchSourceContext;
-        String[] fields = defaultFields;
         String opType = null;
         long version = Versions.MATCH_ANY;
         VersionType versionType = VersionType.INTERNAL;

@@ -371,21 +366,14 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
             retryOnConflict = parser.intValue();
         } else if (PIPELINE.match(currentFieldName, parser.getDeprecationHandler())) {
             pipeline = parser.text();
-        } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
-            throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
         } else if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) {
             fetchSourceContext = FetchSourceContext.fromXContent(parser);
         } else {
             throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]");
         }
     } else if (token == XContentParser.Token.START_ARRAY) {
-        if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
-            DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
-            List<Object> values = parser.list();
-            fields = values.toArray(new String[values.size()]);
-        } else {
-            throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
-        }
+        throw new IllegalArgumentException("Malformed action/metadata line [" + line +
+                "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
     } else if (token == XContentParser.Token.START_OBJECT && SOURCE.match(currentFieldName, parser.getDeprecationHandler())) {
         fetchSourceContext = FetchSourceContext.fromXContent(parser);
     } else if (token != XContentParser.Token.VALUE_NULL) {

@@ -435,10 +423,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
         if (fetchSourceContext != null) {
             updateRequest.fetchSource(fetchSourceContext);
         }
-        if (fields != null) {
-            updateRequest.fields(fields);
-        }

         IndexRequest upsertRequest = updateRequest.upsertRequest();
         if (upsertRequest != null) {
             upsertRequest.version(version);

@@ -291,8 +291,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequest, BulkShardRequest, BulkShardResponse> {
                 indexResponse.getId(), indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(), indexResponse.getVersion(),
                 indexResponse.getResult());

-        if ((updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) ||
-                (updateRequest.fields() != null && updateRequest.fields().length > 0)) {
+        if (updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) {
             final BytesReference indexSourceAsBytes = updateIndexRequest.source();
             final Tuple<XContentType, Map<String, Object>> sourceAndContent =
                     XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType());

@@ -180,8 +180,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationAction<UpdateRequest, UpdateResponse> {
         bulkAction.execute(toSingleItemBulkRequest(upsertRequest), wrapBulkResponse(
                 ActionListener.<IndexResponse>wrap(response -> {
                     UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult());
-                    if ((request.fetchSource() != null && request.fetchSource().fetchSource()) ||
-                        (request.fields() != null && request.fields().length > 0)) {
+                    if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
                         Tuple<XContentType, Map<String, Object>> sourceAndContent =
                                 XContentHelper.convertToMap(upsertSourceBytes, true, upsertRequest.getContentType());
                         update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));

@@ -29,7 +29,6 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.settings.Settings;

@@ -49,7 +48,7 @@ import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.lookup.SourceLookup;

 import java.io.IOException;
-import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.function.LongSupplier;

@@ -292,44 +291,18 @@ public class UpdateHelper extends AbstractComponent {

     /**
      * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response.
-     * For BWC this function also extracts the {@link UpdateRequest#fields()} from the updated document to be returned in a update response
      */
     public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version,
                                              final Map<String, Object> source, XContentType sourceContentType,
                                              @Nullable final BytesReference sourceAsBytes) {
-        if ((request.fields() == null || request.fields().length == 0) &&
-            (request.fetchSource() == null || request.fetchSource().fetchSource() == false)) {
+        if (request.fetchSource() == null || request.fetchSource().fetchSource() == false) {
             return null;
         }
-        SourceLookup sourceLookup = new SourceLookup();
-        sourceLookup.setSource(source);
-        boolean sourceRequested = false;
-        Map<String, DocumentField> fields = null;
-        if (request.fields() != null && request.fields().length > 0) {
-            for (String field : request.fields()) {
-                if (field.equals("_source")) {
-                    sourceRequested = true;
-                    continue;
-                }
-                Object value = sourceLookup.extractValue(field);
-                if (value != null) {
-                    if (fields == null) {
-                        fields = new HashMap<>(2);
-                    }
-                    DocumentField documentField = fields.get(field);
-                    if (documentField == null) {
-                        documentField = new DocumentField(field, new ArrayList<>(2));
-                        fields.put(field, documentField);
-                    }
-                    documentField.getValues().add(value);
-                }
-            }
-        }

         BytesReference sourceFilteredAsBytes = sourceAsBytes;
-        if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
-            sourceRequested = true;
-            if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
-                Object value = sourceLookup.filter(request.fetchSource());
-                try {
-                    final int initialCapacity = Math.min(1024, sourceAsBytes.length());
+        if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
+            SourceLookup sourceLookup = new SourceLookup();
+            sourceLookup.setSource(source);
+            Object value = sourceLookup.filter(request.fetchSource());
+            try {
+                final int initialCapacity = Math.min(1024, sourceAsBytes.length());

@@ -342,11 +315,9 @@ public class UpdateHelper extends AbstractComponent {
                 throw new ElasticsearchException("Error filtering source", e);
             }
         }
-    }

         // TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType)
-        return new GetResult(concreteIndex, request.type(), request.id(), version, true,
-            sourceRequested ? sourceFilteredAsBytes : null, fields);
+        return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceFilteredAsBytes, Collections.emptyMap());
     }

     public static class Result {

@@ -19,8 +19,6 @@

 package org.elasticsearch.action.update;

-import java.util.Arrays;
-
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.DocWriteRequest;

@@ -30,11 +28,14 @@ import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;

@@ -48,15 +49,46 @@ import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

 import java.io.IOException;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

 import static org.elasticsearch.action.ValidateActions.addValidationError;

 public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         implements DocWriteRequest<UpdateRequest>, WriteRequest<UpdateRequest>, ToXContentObject {
+    private static ObjectParser<UpdateRequest, Void> PARSER;
+
+    private static final ParseField SCRIPT_FIELD = new ParseField("script");
+    private static final ParseField SCRIPTED_UPSERT_FIELD = new ParseField("scripted_upsert");
+    private static final ParseField UPSERT_FIELD = new ParseField("upsert");
+    private static final ParseField DOC_FIELD = new ParseField("doc");
+    private static final ParseField DOC_AS_UPSERT_FIELD = new ParseField("doc_as_upsert");
+    private static final ParseField DETECT_NOOP_FIELD = new ParseField("detect_noop");
+    private static final ParseField SOURCE_FIELD = new ParseField("_source");
+
+    static {
+        PARSER = new ObjectParser<>(UpdateRequest.class.getSimpleName());
+        PARSER.declareField((request, script) -> request.script = script,
+                (parser, context) -> Script.parse(parser), SCRIPT_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING);
+        PARSER.declareBoolean(UpdateRequest::scriptedUpsert, SCRIPTED_UPSERT_FIELD);
+        PARSER.declareObject((request, builder) -> request.safeUpsertRequest().source(builder),
+                (parser, context) -> {
+                    XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
+                    builder.copyCurrentStructure(parser);
+                    return builder;
+                }, UPSERT_FIELD);
+        PARSER.declareObject((request, builder) -> request.safeDoc().source(builder),
+                (parser, context) -> {
+                    XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
+                    docBuilder.copyCurrentStructure(parser);
+                    return docBuilder;
+                }, DOC_FIELD);
+        PARSER.declareBoolean(UpdateRequest::docAsUpsert, DOC_AS_UPSERT_FIELD);
+        PARSER.declareBoolean(UpdateRequest::detectNoop, DETECT_NOOP_FIELD);
+        PARSER.declareField(UpdateRequest::fetchSource,
+                (parser, context) -> FetchSourceContext.fromXContent(parser), SOURCE_FIELD,
+                ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING);
+    }
+
     private String type;
     private String id;

@@ -66,7 +98,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
     @Nullable
     Script script;

-    private String[] fields;
     private FetchSourceContext fetchSourceContext;

     private long version = Versions.MATCH_ANY;

@@ -365,16 +396,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         return this;
     }

-    /**
-     * Explicitly specify the fields that will be returned. By default, nothing is returned.
-     * @deprecated Use {@link UpdateRequest#fetchSource(String[], String[])} instead
-     */
-    @Deprecated
-    public UpdateRequest fields(String... fields) {
-        this.fields = fields;
-        return this;
-    }
-
     /**
      * Indicate that _source should be returned with every hit, with an
      * "include" and/or "exclude" set which can include simple wildcard

@@ -389,7 +410,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
      */
     public UpdateRequest fetchSource(@Nullable String include, @Nullable String exclude) {
         FetchSourceContext context = this.fetchSourceContext == null ? FetchSourceContext.FETCH_SOURCE : this.fetchSourceContext;
-        this.fetchSourceContext = new FetchSourceContext(context.fetchSource(), new String[] {include}, new String[]{exclude});
+        String[] includes = include == null ? Strings.EMPTY_ARRAY : new String[]{include};
+        String[] excludes = exclude == null ? Strings.EMPTY_ARRAY : new String[]{exclude};
+        this.fetchSourceContext = new FetchSourceContext(context.fetchSource(), includes, excludes);
         return this;
     }

@@ -428,16 +451,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         return this;
     }


-    /**
-     * Get the fields to be returned.
-     * @deprecated Use {@link UpdateRequest#fetchSource()} instead
-     */
-    @Deprecated
-    public String[] fields() {
-        return fields;
-    }
-
     /**
      * Gets the {@link FetchSourceContext} which defines how the _source should
      * be fetched.

@@ -707,49 +720,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
     }

     public UpdateRequest fromXContent(XContentParser parser) throws IOException {
-        Script script = null;
-        XContentParser.Token token = parser.nextToken();
-        if (token == null) {
-            return this;
-        }
-        String currentFieldName = null;
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                currentFieldName = parser.currentName();
-            } else if ("script".equals(currentFieldName)) {
-                script = Script.parse(parser);
-            } else if ("scripted_upsert".equals(currentFieldName)) {
-                scriptedUpsert = parser.booleanValue();
-            } else if ("upsert".equals(currentFieldName)) {
-                XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
-                builder.copyCurrentStructure(parser);
-                safeUpsertRequest().source(builder);
-            } else if ("doc".equals(currentFieldName)) {
-                XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
-                docBuilder.copyCurrentStructure(parser);
-                safeDoc().source(docBuilder);
-            } else if ("doc_as_upsert".equals(currentFieldName)) {
-                docAsUpsert(parser.booleanValue());
-            } else if ("detect_noop".equals(currentFieldName)) {
-                detectNoop(parser.booleanValue());
-            } else if ("fields".equals(currentFieldName)) {
-                List<Object> fields = null;
-                if (token == XContentParser.Token.START_ARRAY) {
-                    fields = (List) parser.list();
-                } else if (token.isValue()) {
-                    fields = Collections.singletonList(parser.text());
-                }
-                if (fields != null) {
-                    fields(fields.toArray(new String[fields.size()]));
-                }
-            } else if ("_source".equals(currentFieldName)) {
-                fetchSourceContext = FetchSourceContext.fromXContent(parser);
-            }
-        }
-        if (script != null) {
-            this.script = script;
-        }
-        return this;
+        return PARSER.parse(parser, this, null);
     }

     public boolean docAsUpsert() {

@@ -789,7 +760,12 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
             doc = new IndexRequest();
             doc.readFrom(in);
         }
-        fields = in.readOptionalStringArray();
+        if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
+            String[] fields = in.readOptionalStringArray();
+            if (fields != null) {
+                throw new IllegalArgumentException("[fields] is no longer supported");
+            }
+        }
         fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
         if (in.readBoolean()) {
             upsertRequest = new IndexRequest();

@@ -830,7 +806,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
             doc.id(id);
             doc.writeTo(out);
         }
-        out.writeOptionalStringArray(fields);
+        if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
+            out.writeOptionalStringArray(null);
+        }
         out.writeOptionalWriteable(fetchSourceContext);
         if (upsertRequest == null) {
             out.writeBoolean(false);

@@ -880,9 +858,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         if (detectNoop == false) {
             builder.field("detect_noop", detectNoop);
         }
-        if (fields != null) {
-            builder.array("fields", fields);
-        }
         if (fetchSourceContext != null) {
             builder.field("_source", fetchSourceContext);
         }

@@ -908,9 +883,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         }
         res.append(", scripted_upsert[").append(scriptedUpsert).append("]");
         res.append(", detect_noop[").append(detectNoop).append("]");
-        if (fields != null) {
-            res.append(", fields[").append(Arrays.toString(fields)).append("]");
-        }
         return res.append("}").toString();
     }
 }

@@ -26,20 +26,15 @@ import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.VersionType;
-import org.elasticsearch.rest.action.document.RestUpdateAction;
 import org.elasticsearch.script.Script;

 import java.util.Map;

 public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<UpdateRequest, UpdateResponse, UpdateRequestBuilder>
         implements WriteRequestBuilder<UpdateRequestBuilder> {
-    private static final DeprecationLogger DEPRECATION_LOGGER =
-            new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class));

     public UpdateRequestBuilder(ElasticsearchClient client, UpdateAction action) {
         super(client, action, new UpdateRequest());

@@ -87,17 +82,6 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<UpdateRequest, UpdateResponse, UpdateRequestBuilder>
         return this;
     }

-    /**
-     * Explicitly specify the fields that will be returned. By default, nothing is returned.
-     * @deprecated Use {@link UpdateRequestBuilder#setFetchSource(String[], String[])} instead
-     */
-    @Deprecated
-    public UpdateRequestBuilder setFields(String... fields) {
-        DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
-        request.fields(fields);
-        return this;
-    }
-
     /**
      * Indicate that _source should be returned with every hit, with an
      * "include" and/or "exclude" set which can include simple wildcard

@@ -585,12 +585,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
             if (current.generation == location.generation) {
                 // no need to fsync here the read operation will ensure that buffers are written to disk
                 // if they are still in RAM and we are reading onto that position
-                try {
-                    return current.read(location);
-                } catch (final Exception ex) {
-                    closeOnTragicEvent(ex);
-                    throw ex;
-                }
+                return current.read(location);
             } else {
                 // read backwards - it's likely we need to read on that is recent
                 for (int i = readers.size() - 1; i >= 0; i--) {

@@ -600,6 +595,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
                 }
             }
         }
+        } catch (final Exception ex) {
+            closeOnTragicEvent(ex);
+            throw ex;
         }
         return null;
     }

@@ -749,15 +747,28 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
         }
     }

+    /**
+     * Closes the translog if the current translog writer experienced a tragic exception.
+     *
+     * Note that in case this thread closes the translog it must not already be holding a read lock on the translog as it will acquire a
+     * write lock in the course of closing the translog
+     *
+     * @param ex if an exception occurs closing the translog, it will be suppressed into the provided exception
+     */
     private void closeOnTragicEvent(final Exception ex) {
+        // we can not hold a read lock here because closing will attempt to obtain a write lock and that would result in self-deadlock
+        assert readLock.isHeldByCurrentThread() == false : Thread.currentThread().getName();
         if (current.getTragicException() != null) {
             try {
                 close();
             } catch (final AlreadyClosedException inner) {
-                // don't do anything in this case. The AlreadyClosedException comes from TranslogWriter and we should not add it as suppressed because
-                // will contain the Exception ex as cause. See also https://github.com/elastic/elasticsearch/issues/15941
+                /*
+                 * Don't do anything in this case. The AlreadyClosedException comes from TranslogWriter and we should not add it as
+                 * suppressed because it will contain the provided exception as its cause. See also
+                 * https://github.com/elastic/elasticsearch/issues/15941.
+                 */
             } catch (final Exception inner) {
-                assert (ex != inner.getCause());
+                assert ex != inner.getCause();
                 ex.addSuppressed(inner);
             }
         }
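The new assertion encodes the invariant behind #29520: ReentrantReadWriteLock cannot upgrade a read lock to a write lock, so a thread that tries to close the translog while still holding the read lock blocks on itself forever. A standalone demonstration of that deadlock shape (plain JDK code, not Elasticsearch's ReleasableLock wrapper):

    import java.util.concurrent.locks.ReentrantReadWriteLock;

    class ReadToWriteUpgradeDeadlock {
        public static void main(String[] args) {
            ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
            lock.readLock().lock();
            try {
                System.out.println("holding read lock, requesting write lock...");
                // Self-deadlock: the write lock waits for all readers to release,
                // including the read hold owned by this very thread.
                lock.writeLock().lock();
                System.out.println("never reached");
            } finally {
                lock.readLock().unlock();
            }
        }
    }

The production fix therefore moves the catch-and-close out of the read-locked section, as the first Translog hunk shows, and the assertion keeps it that way.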
@@ -24,9 +24,6 @@ import org.elasticsearch.action.bulk.BulkShardRequest;
 import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.rest.BaseRestHandler;

@@ -50,8 +47,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT;
  * </pre>
  */
 public class RestBulkAction extends BaseRestHandler {
-    private static final DeprecationLogger DEPRECATION_LOGGER =
-        new DeprecationLogger(Loggers.getLogger(RestBulkAction.class));

     private final boolean allowExplicitIndex;

@@ -80,11 +75,6 @@ public class RestBulkAction extends BaseRestHandler {
         String defaultType = request.param("type", MapperService.SINGLE_MAPPING_NAME);
         String defaultRouting = request.param("routing");
         FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
-        String fieldsParam = request.param("fields");
-        if (fieldsParam != null) {
-            DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
-        }
-        String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null;
         String defaultPipeline = request.param("pipeline");
         String waitForActiveShards = request.param("wait_for_active_shards");
         if (waitForActiveShards != null) {

@@ -92,7 +82,7 @@ public class RestBulkAction extends BaseRestHandler {
         }
         bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
         bulkRequest.setRefreshPolicy(request.param("refresh"));
-        bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields,
+        bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting,
             defaultFetchSourceContext, defaultPipeline, null, allowExplicitIndex, request.getXContentType());

         return channel -> client.bulk(bulkRequest, new RestStatusToXContentListener<>(channel));
@@ -23,9 +23,6 @@ import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.rest.BaseRestHandler;

@@ -40,8 +37,6 @@ import java.io.IOException;
 import static org.elasticsearch.rest.RestRequest.Method.POST;

 public class RestUpdateAction extends BaseRestHandler {
-    private static final DeprecationLogger DEPRECATION_LOGGER =
-        new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class));

     public RestUpdateAction(Settings settings, RestController controller) {
         super(settings);

@@ -65,15 +60,7 @@ public class RestUpdateAction extends BaseRestHandler {
         }
         updateRequest.docAsUpsert(request.paramAsBoolean("doc_as_upsert", updateRequest.docAsUpsert()));
         FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
-        String sField = request.param("fields");
-        if (sField != null && fetchSourceContext != null) {
-            throw new IllegalArgumentException("[fields] and [_source] cannot be used in the same request");
-        }
-        if (sField != null) {
-            DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
-            String[] sFields = Strings.splitStringByCommaToArray(sField);
-            updateRequest.fields(sFields);
-        } else if (fetchSourceContext != null) {
+        if (fetchSourceContext != null) {
             updateRequest.fetchSource(fetchSourceContext);
         }
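Both REST handlers can drop their DeprecationLogger because the only option they warned about, fields, no longer exists. For reference, the idiom being deleted looks like this (the handler class is hypothetical; DeprecationLogger and Loggers are the Elasticsearch classes named in the removed imports):

    import org.elasticsearch.common.logging.DeprecationLogger;
    import org.elasticsearch.common.logging.Loggers;

    class MyRestHandler {
        private static final DeprecationLogger DEPRECATION_LOGGER =
                new DeprecationLogger(Loggers.getLogger(MyRestHandler.class));

        void checkFieldsParam(String fieldsParam) {
            if (fieldsParam != null) {
                // Logs the deprecation and, for an in-flight REST request, also
                // surfaces it to the client as a "Warning" response header.
                DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
            }
        }
    }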
@@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregatorFactory;

@@ -50,7 +49,7 @@ import java.util.function.Consumer;
 public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric, PercentilesAggregationBuilder> {
     public static final String NAME = Percentiles.TYPE_NAME;

-    public static final double[] DEFAULT_PERCENTS = new double[] { 1, 5, 25, 50, 75, 95, 99 };
+    private static final double[] DEFAULT_PERCENTS = new double[] { 1, 5, 25, 50, 75, 95, 99 };
     public static final ParseField PERCENTS_FIELD = new ParseField("percents");
     public static final ParseField KEYED_FIELD = new ParseField("keyed");
     public static final ParseField METHOD_FIELD = new ParseField("method");
@@ -75,9 +75,9 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
     /** the default number of fragments size in characters */
     public static final int DEFAULT_FRAGMENT_CHAR_SIZE = 100;
     /** the default opening tag */
-    public static final String[] DEFAULT_PRE_TAGS = new String[]{"<em>"};
+    static final String[] DEFAULT_PRE_TAGS = new String[]{"<em>"};
     /** the default closing tag */
-    public static final String[] DEFAULT_POST_TAGS = new String[]{"</em>"};
+    static final String[] DEFAULT_POST_TAGS = new String[]{"</em>"};

     /** the default opening tags when <tt>tag_schema = "styled"</tt> */
     public static final String[] DEFAULT_STYLED_PRE_TAG = {
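These two hunks are #29528 in action: public static final on an array freezes the reference, not the elements, so any caller could mutate the shared defaults for the whole JVM. A small self-contained demonstration of the hazard and the defensive-copy alternative (class and method names are illustrative):

    import java.util.Arrays;

    class DefaultsDemo {
        // Hazard: callers can write through this reference.
        public static final double[] EXPOSED_DEFAULTS = {1, 5, 25, 50, 75, 95, 99};

        // Safer: keep the array private (as the hunks above do) and hand out copies.
        private static final double[] DEFAULTS = {1, 5, 25, 50, 75, 95, 99};

        static double[] defaults() {
            return Arrays.copyOf(DEFAULTS, DEFAULTS.length);
        }

        public static void main(String[] args) {
            EXPOSED_DEFAULTS[0] = 42; // compiles and runs: shared defaults corrupted
            System.out.println(Arrays.toString(EXPOSED_DEFAULTS)); // [42.0, 5.0, ...]
            defaults()[0] = 42;       // only mutates a throwaway copy
            System.out.println(Arrays.toString(defaults()));       // [1.0, 5.0, ...]
        }
    }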
@@ -94,12 +94,12 @@ public class BulkRequestTests extends ESTestCase {
         BulkRequest bulkRequest = new BulkRequest();
         bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON);
         assertThat(bulkRequest.numberOfActions(), equalTo(4));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
+        assertThat(bulkRequest.requests().get(0).id(), equalTo("1"));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().utf8ToString(), equalTo("{\"field\":\"value\"}"));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(1)).id(), equalTo("0"));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1"));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1"));
+        assertThat(bulkRequest.requests().get(1).id(), equalTo("0"));
+        assertThat(bulkRequest.requests().get(1).type(), equalTo("type1"));
+        assertThat(bulkRequest.requests().get(1).index(), equalTo("index1"));
         Script script = ((UpdateRequest) bulkRequest.requests().get(1)).script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("counter += param1"));

@@ -107,20 +107,18 @@ public class BulkRequestTests extends ESTestCase {
         Map<String, Object> scriptParams = script.getParams();
         assertThat(scriptParams, notNullValue());
         assertThat(scriptParams.size(), equalTo(1));
-        assertThat(((Integer) scriptParams.get("param1")), equalTo(1));
+        assertThat(scriptParams.get("param1"), equalTo(1));
         assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().utf8ToString(), equalTo("{\"counter\":1}"));
     }

     public void testBulkAllowExplicitIndex() throws Exception {
-        String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
-        try {
-            new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), null, null, false, XContentType.JSON);
-            fail();
-        } catch (Exception e) {
-        }
+        String bulkAction1 = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
+        Exception ex = expectThrows(Exception.class,
+            () -> new BulkRequest().add(
+                new BytesArray(bulkAction1.getBytes(StandardCharsets.UTF_8)), null, null, false, XContentType.JSON));
+        assertEquals("explicit index in bulk is not allowed", ex.getMessage());

-        bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");
+        String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");
         new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), "test", null, false, XContentType.JSON);
     }

@@ -177,6 +175,16 @@ public class BulkRequestTests extends ESTestCase {
         assertThat(bulkRequest.numberOfActions(), equalTo(9));
     }

+    public void testBulkActionShouldNotContainArray() throws Exception {
+        String bulkAction = "{ \"index\":{\"_index\":[\"index1\", \"index2\"],\"_type\":\"type1\",\"_id\":\"1\"} }\r\n"
+            + "{ \"field1\" : \"value1\" }\r\n";
+        BulkRequest bulkRequest = new BulkRequest();
+        IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
+            () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON));
+        assertEquals(exc.getMessage(), "Malformed action/metadata line [1]" +
+            ", expected a simple value for field [_index] but found [START_ARRAY]");
+    }
+
     public void testBulkEmptyObject() throws Exception {
         String bulkIndexAction = "{ \"index\":{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"1\"} }\r\n";
         String bulkIndexSource = "{ \"field1\" : \"value1\" }\r\n";

@@ -299,7 +307,7 @@ public class BulkRequestTests extends ESTestCase {
         out.write(xContentType.xContent().streamSeparator());
         try(XContentBuilder builder = XContentFactory.contentBuilder(xContentType, out)) {
             builder.startObject();
-            builder.field("doc", "{}");
+            builder.startObject("doc").endObject();
             Map<String,Object> values = new HashMap<>();
             values.put("version", 2L);
             values.put("_index", "index");
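Several test hunks in this merge swap the try/fail/catch idiom for expectThrows, which hands back the caught exception so its message can be asserted too. A minimal stand-in for the LuceneTestCase/ESTestCase helper, to show the mechanics (this sketch takes a plain Runnable; the real helper accepts a throwing lambda):

    class ExpectThrowsDemo {
        static <T extends Throwable> T expectThrows(Class<T> expectedType, Runnable action) {
            try {
                action.run();
            } catch (Throwable t) {
                if (expectedType.isInstance(t)) {
                    return expectedType.cast(t);
                }
                throw new AssertionError("unexpected exception type: " + t, t);
            }
            throw new AssertionError("expected " + expectedType.getSimpleName() + " but nothing was thrown");
        }

        public static void main(String[] args) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
                throw new IllegalArgumentException("boom");
            });
            // Unlike try { ...; fail(); } catch (Exception ignored) {}, the exception
            // escapes the helper, so the message is also verifiable:
            System.out.println("caught: " + e.getMessage());
        }
    }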
@@ -260,13 +260,13 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
         assertThat(bulkResponse.getItems().length, equalTo(3));

         bulkResponse = client().prepareBulk()
-                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFields("field")
+                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFetchSource("field", null)
                         .setScript(new Script(
                             ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap())))
-                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFields("field")
+                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFetchSource("field", null)
                         .setScript(new Script(
                             ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap())))
-                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFields("field")
+                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFetchSource("field", null)
                         .setScript(new Script(
                             ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap())))
                 .execute().actionGet();

@@ -279,7 +279,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {

         assertThat(bulkResponse.getItems()[1].getResponse().getId(), equalTo("2"));
         assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(2L));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field("field").getValue(), equalTo(2));
+        assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().sourceAsMap().get("field"), equalTo(2));
         assertThat(bulkResponse.getItems()[1].getFailure(), nullValue());

         assertThat(bulkResponse.getItems()[2].getFailure().getId(), equalTo("3"));

@@ -303,7 +303,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
             builder.add(
                     client().prepareUpdate()
                             .setIndex("test").setType("type1").setId(Integer.toString(i))
-                            .setFields("counter")
+                            .setFetchSource("counter", null)
                             .setScript(script)
                             .setUpsert(jsonBuilder().startObject().field("counter", 1).endObject()));
         }

@@ -319,7 +319,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
             assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
             assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i)));
             assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(1L));
-            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(1));
+            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(1));

             for (int j = 0; j < 5; j++) {
                 GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).execute()

@@ -333,7 +333,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
         builder = client().prepareBulk();
         for (int i = 0; i < numDocs; i++) {
             UpdateRequestBuilder updateBuilder = client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i))
-                    .setFields("counter");
+                    .setFetchSource("counter", null);
             if (i % 2 == 0) {
                 updateBuilder.setScript(script);
             } else {

@@ -357,7 +357,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
             assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
             assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i)));
             assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(2L));
-            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(2));
+            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(2));
         }

         builder = client().prepareBulk();
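The assertion rewrites in BulkWithUpdatesIT all follow one mapping: values previously read through GetResult#field (backed by the removed fields option) now come out of the filtered _source map. A sketch of the new read path, given an UpdateResponse (field name illustrative):

    import java.util.Map;

    import org.elasticsearch.action.update.UpdateResponse;

    class SourceAsMapRead {
        static Object counter(UpdateResponse response) {
            // Before (removed): response.getGetResult().field("counter").getValue()
            Map<String, Object> source = response.getGetResult().sourceAsMap();
            return source.get("counter");
        }
    }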
@@ -61,7 +61,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
 import static org.elasticsearch.script.MockScriptEngine.mockInlineScript;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
-import static org.hamcrest.Matchers.arrayContaining;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;

@@ -277,17 +276,26 @@ public class UpdateRequestTests extends ESTestCase {
         assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
     }

-    // Related to issue 15338
-    public void testFieldsParsing() throws Exception {
-        UpdateRequest request = new UpdateRequest("test", "type1", "1").fromXContent(
-                createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}")));
-        assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1"));
-        assertThat(request.fields(), arrayContaining("_source"));
-
-        request = new UpdateRequest("test", "type2", "2").fromXContent(createParser(JsonXContent.jsonXContent,
-                new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}")));
-        assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2"));
-        assertThat(request.fields(), arrayContaining("field1", "field2"));
+    public void testUnknownFieldParsing() throws Exception {
+        UpdateRequest request = new UpdateRequest("test", "type", "1");
+        XContentParser contentParser = createParser(XContentFactory.jsonBuilder()
+                .startObject()
+                    .field("unknown_field", "test")
+                .endObject());
+
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> request.fromXContent(contentParser));
+        assertEquals("[UpdateRequest] unknown field [unknown_field], parser not found", ex.getMessage());
+
+        UpdateRequest request2 = new UpdateRequest("test", "type", "1");
+        XContentParser unknownObject = createParser(XContentFactory.jsonBuilder()
+                .startObject()
+                    .field("script", "ctx.op = ctx._source.views == params.count ? 'delete' : 'none'")
+                    .startObject("params")
+                        .field("count", 1)
+                    .endObject()
+                .endObject());
+        ex = expectThrows(IllegalArgumentException.class, () -> request2.fromXContent(unknownObject));
+        assertEquals("[UpdateRequest] unknown field [params], parser not found", ex.getMessage());
     }

     public void testFetchSourceParsing() throws Exception {

@@ -444,13 +452,6 @@ public class UpdateRequestTests extends ESTestCase {
             BytesReference source = RandomObjects.randomSource(random(), xContentType);
             updateRequest.upsert(new IndexRequest().source(source, xContentType));
         }
-        if (randomBoolean()) {
-            String[] fields = new String[randomIntBetween(0, 5)];
-            for (int i = 0; i < fields.length; i++) {
-                fields[i] = randomAlphaOfLength(5);
-            }
-            updateRequest.fields(fields);
-        }
         if (randomBoolean()) {
             if (randomBoolean()) {
                 updateRequest.fetchSource(randomBoolean());

@@ -487,10 +488,8 @@ public class UpdateRequestTests extends ESTestCase {

         assertEquals(updateRequest.detectNoop(), parsedUpdateRequest.detectNoop());
         assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert());
-        assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert());
         assertEquals(updateRequest.script(), parsedUpdateRequest.script());
         assertEquals(updateRequest.scriptedUpsert(), parsedUpdateRequest.scriptedUpsert());
-        assertArrayEquals(updateRequest.fields(), parsedUpdateRequest.fields());
         assertEquals(updateRequest.fetchSource(), parsedUpdateRequest.fetchSource());

         BytesReference finalBytes = toXContent(parsedUpdateRequest, xContentType, humanReadable);
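testUnknownFieldParsing pins down the headline behavior of #29293: once UpdateRequest parses through ObjectParser, unknown top-level fields are rejected rather than silently ignored. A minimal sketch of that strictness with Elasticsearch's ObjectParser (the Doc target class and field name are illustrative):

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    class StrictParsingSketch {
        static class Doc {
            String field1;
        }

        // No unknown-field handler is registered, so unrecognized fields throw
        // IllegalArgumentException("[doc] unknown field [...], parser not found").
        private static final ObjectParser<Doc, Void> PARSER = new ObjectParser<>("doc", Doc::new);
        static {
            PARSER.declareString((doc, value) -> doc.field1 = value, new ParseField("field1"));
        }

        static Doc parse(XContentParser parser) {
            return PARSER.apply(parser, null);
        }
    }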
@@ -225,7 +225,7 @@ public class UpdateIT extends ESIntegTestCase {
         UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
                 .setDocAsUpsert(true)
-                .setFields("_source")
+                .setFetchSource(true)
                 .execute().actionGet();
         assertThat(updateResponse.getIndex(), equalTo("test"));
         assertThat(updateResponse.getGetResult(), notNullValue());

@@ -241,7 +241,7 @@ public class UpdateIT extends ESIntegTestCase {
         assertThrows(client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
                 .setDocAsUpsert(false)
-                .setFields("_source")
+                .setFetchSource(true)
                 .execute(), DocumentMissingException.class);
     }

@@ -264,7 +264,7 @@ public class UpdateIT extends ESIntegTestCase {
         updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
                 .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo")))
-                .setFields("_source")
+                .setFetchSource(true)
                 .execute().actionGet();

         assertThat(updateResponse.getIndex(), equalTo("test"));

@@ -293,12 +293,9 @@ public class UpdateIT extends ESIntegTestCase {
         ensureGreen();

         Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field"));
-        try {
-            client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet();
-            fail();
-        } catch (DocumentMissingException e) {
-            // all is well
-        }
+        DocumentMissingException ex = expectThrows(DocumentMissingException.class,
+                () -> client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet());
+        assertEquals("[type1][1]: document missing", ex.getMessage());

         client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();

@@ -353,19 +350,6 @@ public class UpdateIT extends ESIntegTestCase {
             assertThat(getResponse.isExists(), equalTo(false));
         }

-        // check fields parameter
-        client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
-        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
-                .setScript(fieldIncScript)
-                .setFields("field")
-                .setFetchSource(true)
-                .execute().actionGet();
-        assertThat(updateResponse.getIndex(), equalTo("test"));
-        assertThat(updateResponse.getGetResult(), notNullValue());
-        assertThat(updateResponse.getGetResult().getIndex(), equalTo("test"));
-        assertThat(updateResponse.getGetResult().sourceRef(), notNullValue());
-        assertThat(updateResponse.getGetResult().field("field").getValue(), notNullValue());
-
         // check _source parameter
         client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet();
         updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")

@@ -383,7 +367,7 @@ public class UpdateIT extends ESIntegTestCase {
         // check updates without script
         // add new field
         client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
-        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet();
+        client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet();
         for (int i = 0; i < 5; i++) {
             GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
             assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1"));

@@ -391,7 +375,7 @@ public class UpdateIT extends ESIntegTestCase {
         }

         // change existing field
-        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet();
+        client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet();
         for (int i = 0; i < 5; i++) {
             GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
             assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("3"));

@@ -409,7 +393,7 @@ public class UpdateIT extends ESIntegTestCase {
         testMap.put("map1", 8);

         client().prepareIndex("test", "type1", "1").setSource("map", testMap).execute().actionGet();
-        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet();
+        client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet();
         for (int i = 0; i < 5; i++) {
             GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
             Map map1 = (Map) getResponse.getSourceAsMap().get("map");

@@ -581,7 +565,7 @@ public class UpdateIT extends ESIntegTestCase {
             assertThat(response.getId(), equalTo(Integer.toString(i)));
             assertThat(response.isExists(), equalTo(true));
             assertThat(response.getVersion(), equalTo((long) numberOfThreads));
-            assertThat((Integer) response.getSource().get("field"), equalTo(numberOfThreads));
+            assertThat(response.getSource().get("field"), equalTo(numberOfThreads));
         }
     }
@@ -248,7 +248,7 @@ public class UpdateNoopIT extends ESIntegTestCase {
         UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "type1", "1")
                 .setDoc(xContentBuilder)
                 .setDocAsUpsert(true)
-                .setFields("_source");
+                .setFetchSource(true);
         if (detectNoop != null) {
             updateRequest.setDetectNoop(detectNoop);
         }