Merge remote-tracking branch 'upstream/7.x' into enrich-7.x

Michael Basnight 2019-05-08 13:59:01 -05:00
commit 202a840da9
343 changed files with 6668 additions and 2970 deletions

View File

@@ -6,6 +6,7 @@ if (System.env.ELASTIC_ARTIFACTORY_USERNAME == null || System.env.ELASTIC_ARTIFA
 settings.pluginManagement {
   repositories {
     maven {
+      name "artifactory-gradle-plugins"
       url "https://artifactory.elstc.co/artifactory/gradle-plugins"
       credentials {
         username System.env.ELASTIC_ARTIFACTORY_USERNAME
@@ -21,6 +22,7 @@ if (System.env.ELASTIC_ARTIFACTORY_USERNAME == null || System.env.ELASTIC_ARTIFA
   buildscript {
     repositories {
       maven {
+        name "artifactory-gradle-release"
         url "https://artifactory.elstc.co/artifactory/gradle-release/"
         credentials {
           username System.env.ELASTIC_ARTIFACTORY_USERNAME
@@ -31,6 +33,7 @@ if (System.env.ELASTIC_ARTIFACTORY_USERNAME == null || System.env.ELASTIC_ARTIFA
   }
   repositories {
     maven {
+      name "artifactory-gradle-release"
       url "https://artifactory.elstc.co/artifactory/gradle-release/"
       credentials {
         username System.env.ELASTIC_ARTIFACTORY_USERNAME

View File

@@ -14,3 +14,5 @@ ES_RUNTIME_JAVA:
   - zulu8
   - zulu11
   - zulu12
+  - corretto11
+  - corretto8

View File

@@ -88,7 +88,7 @@ subprojects {
   }
   repositories {
     maven {
-      name = 'localTest'
+      name = 'test'
       url = "${rootProject.buildDir}/local-test-repo"
     }
   }

View File

@@ -126,6 +126,7 @@ dependencies {
   compile 'com.avast.gradle:gradle-docker-compose-plugin:0.8.12'
   testCompile "junit:junit:${props.getProperty('junit')}"
   testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
+  testCompile 'com.github.tomakehurst:wiremock-jre8-standalone:2.23.2'
 }

/*****************************************************************************
@@ -200,7 +201,7 @@ if (project != rootProject) {
   task integTest(type: Test) {
     // integration test requires the local testing repo for example plugin builds
     dependsOn project.rootProject.allprojects.collect {
-      it.tasks.matching { it.name == 'publishNebulaPublicationToLocalTestRepository'}
+      it.tasks.matching { it.name == 'publishNebulaPublicationToTestRepository'}
     }
     dependsOn setupLocalDownloads
     exclude "**/*Tests.class"

View File

@@ -39,6 +39,9 @@ import org.gradle.api.artifacts.ModuleVersionIdentifier
 import org.gradle.api.artifacts.ProjectDependency
 import org.gradle.api.artifacts.ResolvedArtifact
 import org.gradle.api.artifacts.dsl.RepositoryHandler
+import org.gradle.api.artifacts.repositories.ArtifactRepository
+import org.gradle.api.artifacts.repositories.IvyArtifactRepository
+import org.gradle.api.artifacts.repositories.MavenArtifactRepository
 import org.gradle.api.credentials.HttpHeaderCredentials
 import org.gradle.api.execution.TaskActionListener
 import org.gradle.api.execution.TaskExecutionGraph
@@ -580,6 +583,16 @@ class BuildPlugin implements Plugin<Project> {
     /** Adds repositories used by ES dependencies */
     static void configureRepositories(Project project) {
+        project.getRepositories().all { repository ->
+            if (repository instanceof MavenArtifactRepository) {
+                final MavenArtifactRepository maven = (MavenArtifactRepository) repository
+                assertRepositoryURIUsesHttps(maven, project, maven.getUrl())
+                // pass the repository along too so the error message can name it
+                repository.getArtifactUrls().each { uri -> assertRepositoryURIUsesHttps(maven, project, uri) }
+            } else if (repository instanceof IvyArtifactRepository) {
+                final IvyArtifactRepository ivy = (IvyArtifactRepository) repository
+                assertRepositoryURIUsesHttps(ivy, project, ivy.getUrl())
+            }
+        }
         RepositoryHandler repos = project.repositories
         if (System.getProperty("repos.mavenLocal") != null) {
             // with -Drepos.mavenLocal=true we can force checking the local .m2 repo which is
@@ -589,6 +602,7 @@ class BuildPlugin implements Plugin<Project> {
         }
         repos.jcenter()
         repos.ivy {
+            name "elasticsearch"
            url "https://artifacts.elastic.co/downloads"
            patternLayout {
                artifact "elasticsearch/[module]-[revision](-[classifier]).[ext]"
@@ -617,6 +631,12 @@ class BuildPlugin implements Plugin<Project> {
         }
     }

+    private static void assertRepositoryURIUsesHttps(final ArtifactRepository repository, final Project project, final URI uri) {
+        if (uri != null && uri.toURL().getProtocol().equals("http")) {
+            throw new GradleException("repository [${repository.name}] on project with path [${project.path}] is using http for artifacts on [${uri.toURL()}]")
+        }
+    }
+
     /**
      * Returns a closure which can be used with a MavenPom for fixing problems with gradle generated poms.
      *
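As an illustration of the new check (not part of the commit; the repository name and URL below are hypothetical), a build-script repository like this now fails at configuration time with the GradleException above instead of silently fetching artifacts over plain http:

    repositories {
        maven {
            name "insecure-example"             // hypothetical repository
            url "http://repo.example.com/maven" // http rather than https -> build fails
        }
    }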

View File

@@ -99,9 +99,8 @@ class PluginBuildPlugin extends BuildPlugin {
     project.tasks.run.dependsOn(project.tasks.bundlePlugin)
     if (isModule) {
-        project.tasks.run.clusterConfig.module(project)
         project.tasks.run.clusterConfig.distribution = System.getProperty(
-            'run.distribution', 'integ-test-zip'
+            'run.distribution', isXPackModule ? 'default' : 'oss'
         )
     } else {
         project.tasks.run.clusterConfig.plugin(project.path)

View File

@@ -964,6 +964,8 @@ class ClusterFormationTasks {
         }
         doLast {
             project.delete(node.pidFile)
+            // Large tests can exhaust disk space, clean up jdk from the distribution to save some space
+            project.delete(new File(node.homeDir, "jdk"))
         }
     }
 }
} }

View File

@@ -70,7 +70,7 @@ class RestIntegTestTask extends DefaultTask {
     project.testClusters {
         "$name" {
             distribution = 'INTEG_TEST'
-            version = project.version
+            version = VersionProperties.elasticsearch
             javaHome = project.file(project.ext.runtimeJavaHome)
         }
     }

View File

@@ -174,6 +174,7 @@ class VagrantTestPlugin implements Plugin<Project> {
        which should work for 5.0.0+. This isn't a real ivy repository but gradle
        is fine with that */
     repos.ivy {
+        name "elasticsearch"
         artifactPattern "https://artifacts.elastic.co/downloads/elasticsearch/[module]-[revision].[ext]"
     }
 }

View File

@@ -20,16 +20,14 @@ package org.elasticsearch.gradle;
 public enum Distribution {

-    INTEG_TEST("elasticsearch", "integ-test-zip"),
-    DEFAULT("elasticsearch", "elasticsearch"),
-    OSS("elasticsearch-oss", "elasticsearch-oss");
+    INTEG_TEST("elasticsearch"),
+    DEFAULT("elasticsearch"),
+    OSS("elasticsearch-oss");

     private final String artifactName;
-    private final String group;

-    Distribution(String name, String group) {
+    Distribution(String name) {
         this.artifactName = name;
-        this.group = group;
     }

     public String getArtifactName() {
@@ -37,7 +35,11 @@ public enum Distribution {
     }

     public String getGroup() {
-        return "org.elasticsearch.distribution." + group;
+        if (this.equals(INTEG_TEST)) {
+            return "org.elasticsearch.distribution.integ-test-zip";
+        } else {
+            return "org.elasticsearch.distribution." + name().toLowerCase();
+        }
     }

     public String getFileExtension() {
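For illustration (not part of the commit), the groups the reworked getGroup() yields; only INTEG_TEST keeps a group that differs from its lowercased enum name:

    assert Distribution.INTEG_TEST.group == 'org.elasticsearch.distribution.integ-test-zip'
    assert Distribution.DEFAULT.group == 'org.elasticsearch.distribution.default'
    assert Distribution.OSS.group == 'org.elasticsearch.distribution.oss'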

View File

@@ -0,0 +1,112 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import org.gradle.api.Buildable;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.TaskDependency;

import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Pattern;

public class Jdk implements Buildable, Iterable<File> {

    static final Pattern VERSION_PATTERN = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+)(@([a-f0-9]{32}))?");
    private static final List<String> ALLOWED_PLATFORMS = Collections.unmodifiableList(Arrays.asList("linux", "windows", "darwin"));

    private final String name;
    private final Configuration configuration;
    private final Property<String> version;
    private final Property<String> platform;

    Jdk(String name, Project project) {
        this.name = name;
        this.configuration = project.getConfigurations().create("jdk_" + name);
        this.version = project.getObjects().property(String.class);
        this.platform = project.getObjects().property(String.class);
    }

    public String getName() {
        return name;
    }

    public String getVersion() {
        return version.get();
    }

    public void setVersion(String version) {
        if (VERSION_PATTERN.matcher(version).matches() == false) {
            throw new IllegalArgumentException("malformed version [" + version + "] for jdk [" + name + "]");
        }
        this.version.set(version);
    }

    public String getPlatform() {
        return platform.get();
    }

    public void setPlatform(String platform) {
        if (ALLOWED_PLATFORMS.contains(platform) == false) {
            throw new IllegalArgumentException(
                "unknown platform [" + platform + "] for jdk [" + name + "], must be one of " + ALLOWED_PLATFORMS);
        }
        this.platform.set(platform);
    }

    // pkg private, for internal use
    Configuration getConfiguration() {
        return configuration;
    }

    @Override
    public String toString() {
        return configuration.getSingleFile().toString();
    }

    @Override
    public TaskDependency getBuildDependencies() {
        return configuration.getBuildDependencies();
    }

    // internal, make this jdks configuration unmodifiable
    void finalizeValues() {
        if (version.isPresent() == false) {
            throw new IllegalArgumentException("version not specified for jdk [" + name + "]");
        }
        if (platform.isPresent() == false) {
            throw new IllegalArgumentException("platform not specified for jdk [" + name + "]");
        }
        version.finalizeValue();
        platform.finalizeValue();
    }

    @Override
    public Iterator<File> iterator() {
        return configuration.iterator();
    }
}
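A sketch of how a build script configures one of these objects through the jdks container registered by JdkDownloadPlugin below. The version string exercises every group of VERSION_PATTERN (feature "12", update ".0.1", build "+12", and the optional 32-character hash after "@"); the hash shown is illustrative only:

    jdks {
        linux {
            version = '12.0.1+12@0123456789abcdef0123456789abcdef'  // illustrative hash
            platform = 'linux'   // must be one of: linux, windows, darwin
        }
    }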

View File

@@ -0,0 +1,170 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import org.gradle.api.Action;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.UnknownTaskException;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.ConfigurationContainer;
import org.gradle.api.artifacts.dsl.DependencyHandler;
import org.gradle.api.artifacts.repositories.IvyArtifactRepository;
import org.gradle.api.file.CopySpec;
import org.gradle.api.file.FileTree;
import org.gradle.api.file.RelativePath;
import org.gradle.api.tasks.Copy;
import org.gradle.api.tasks.TaskProvider;

import java.io.File;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.function.Supplier;
import java.util.regex.Matcher;

public class JdkDownloadPlugin implements Plugin<Project> {

    @Override
    public void apply(Project project) {
        NamedDomainObjectContainer<Jdk> jdksContainer = project.container(Jdk.class, name ->
            new Jdk(name, project)
        );
        project.getExtensions().add("jdks", jdksContainer);

        project.afterEvaluate(p -> {
            for (Jdk jdk : jdksContainer) {
                jdk.finalizeValues();
                String version = jdk.getVersion();
                String platform = jdk.getPlatform();

                // depend on the jdk directory "artifact" from the root project
                DependencyHandler dependencies = project.getDependencies();
                Map<String, Object> depConfig = new HashMap<>();
                depConfig.put("path", ":"); // root project
                depConfig.put("configuration", configName("extracted_jdk", version, platform));
                dependencies.add(jdk.getConfiguration().getName(), dependencies.project(depConfig));

                // ensure a root level jdk download task exists
                setupRootJdkDownload(project.getRootProject(), platform, version);
            }
        });
    }

    private static void setupRootJdkDownload(Project rootProject, String platform, String version) {
        String extractTaskName = "extract" + capitalize(platform) + "Jdk" + version;
        // NOTE: this is *horrendous*, but seems to be the only way to check for the existence of a registered task
        try {
            rootProject.getTasks().named(extractTaskName);
            // already setup this version
            return;
        } catch (UnknownTaskException e) {
            // fall through: register the task
        }

        // decompose the bundled jdk version, broken into elements as: [feature, interim, update, build]
        // Note the "patch" version is not yet handled here, as it has not yet been used by java.
        Matcher jdkVersionMatcher = Jdk.VERSION_PATTERN.matcher(version);
        if (jdkVersionMatcher.matches() == false) {
            throw new IllegalArgumentException("Malformed jdk version [" + version + "]");
        }
        String jdkVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? (jdkVersionMatcher.group(2)) : "");
        String jdkMajor = jdkVersionMatcher.group(1);
        String jdkBuild = jdkVersionMatcher.group(3);
        String hash = jdkVersionMatcher.group(5);

        // add fake ivy repo for jdk url
        String repoName = "jdk_repo_" + version;
        if (rootProject.getRepositories().findByName(repoName) == null) {
            // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back
            rootProject.getRepositories().ivy(ivyRepo -> {
                ivyRepo.setName(repoName);
                ivyRepo.setUrl("https://download.oracle.com");
                ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
                ivyRepo.patternLayout(layout ->
                    layout.artifact("java/GA/jdk" + jdkMajor + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"));
                ivyRepo.content(content -> content.includeGroup("jdk"));
            });
            // current pattern since 12.0.1
            rootProject.getRepositories().ivy(ivyRepo -> {
                ivyRepo.setName(repoName + "_with_hash");
                ivyRepo.setUrl("https://download.oracle.com");
                ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
                ivyRepo.patternLayout(layout -> layout.artifact(
                    "java/GA/jdk" + jdkVersion + "/" + hash + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"));
                ivyRepo.content(content -> content.includeGroup("jdk"));
            });
        }

        // add the jdk as a "dependency"
        final ConfigurationContainer configurations = rootProject.getConfigurations();
        String remoteConfigName = configName("openjdk", version, platform);
        String localConfigName = configName("extracted_jdk", version, platform);
        Configuration jdkConfig = configurations.findByName(remoteConfigName);
        if (jdkConfig == null) {
            jdkConfig = configurations.create(remoteConfigName);
            configurations.create(localConfigName);
        }
        String extension = platform.equals("windows") ? "zip" : "tar.gz";
        String jdkDep = "jdk:" + (platform.equals("darwin") ? "osx" : platform) + ":" + jdkVersion + "@" + extension;
        rootProject.getDependencies().add(configName("openjdk", version, platform), jdkDep);

        // add task for extraction
        // TODO: look into doing this as an artifact transform, which are cacheable starting in gradle 5.3
        int rootNdx = platform.equals("darwin") ? 2 : 1;
        Action<CopySpec> removeRootDir = copy -> {
            // remove extra unnecessary directory levels
            copy.eachFile(details -> {
                String[] pathSegments = details.getRelativePath().getSegments();
                String[] newPathSegments = Arrays.copyOfRange(pathSegments, rootNdx, pathSegments.length);
                details.setRelativePath(new RelativePath(true, newPathSegments));
            });
            copy.setIncludeEmptyDirs(false);
        };
        // delay resolving jdkConfig until runtime
        Supplier<File> jdkArchiveGetter = jdkConfig::getSingleFile;
        final Callable<FileTree> fileGetter;
        if (extension.equals("zip")) {
            fileGetter = () -> rootProject.zipTree(jdkArchiveGetter.get());
        } else {
            fileGetter = () -> rootProject.tarTree(rootProject.getResources().gzip(jdkArchiveGetter.get()));
        }
        String extractDir = rootProject.getBuildDir().toPath().resolve("jdks/openjdk-" + jdkVersion + "_" + platform).toString();
        TaskProvider<Copy> extractTask = rootProject.getTasks().register(extractTaskName, Copy.class, copyTask -> {
            copyTask.doFirst(t -> rootProject.delete(extractDir));
            copyTask.into(extractDir);
            copyTask.from(fileGetter, removeRootDir);
        });
        rootProject.getArtifacts().add(localConfigName,
            rootProject.getLayout().getProjectDirectory().dir(extractDir),
            artifact -> artifact.builtBy(extractTask));
    }

    private static String configName(String prefix, String version, String platform) {
        return prefix + "_" + version + "_" + platform;
    }

    private static String capitalize(String s) {
        return s.substring(0, 1).toUpperCase(Locale.ROOT) + s.substring(1);
    }
}
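Putting the pieces together, a worked example (illustrative, using the hashed pattern above): for version "12.0.1+12@<hash>" on linux, the dependency notation "jdk:linux:12.0.1@tar.gz" supplies module "linux" and revision "12.0.1", so the resolved artifact URL becomes

    https://download.oracle.com/java/GA/jdk12.0.1/<hash>/12/GPL/openjdk-12.0.1_linux-x64_bin.tar.gz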

View File

@@ -123,8 +123,7 @@ public class WaitForHttpResource {
             if (System.nanoTime() < waitUntil) {
                 Thread.sleep(sleep);
             } else {
-                logger.error("Failed to access url [{}]", url, failure);
-                return false;
+                throw failure;
             }
         }
     }

View File

@@ -22,23 +22,22 @@ import org.elasticsearch.GradleServicesAdapter;
 import org.elasticsearch.gradle.Distribution;
 import org.elasticsearch.gradle.FileSupplier;
 import org.elasticsearch.gradle.Version;
+import org.elasticsearch.gradle.http.WaitForHttpResource;
 import org.gradle.api.NamedDomainObjectContainer;
 import org.gradle.api.Project;
 import org.gradle.api.logging.Logger;
 import org.gradle.api.logging.Logging;

-import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
-import java.io.InputStreamReader;
 import java.io.UncheckedIOException;
-import java.net.HttpURLConnection;
 import java.net.URI;
-import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
+import java.security.GeneralSecurityException;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Objects;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -75,6 +74,8 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
                 services, artifactsExtractDir, workingDirBase
             )
         );
+
+        addWaitForClusterHealth();
     }

     public void setNumberOfNodes(int numberOfNodes) {
@@ -219,6 +220,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
         nodes.all(node -> node.extraConfigFile(destination, from));
     }

+    @Override
+    public void user(Map<String, String> userSpec) {
+        nodes.all(node -> node.user(userSpec));
+    }
+
     private void writeUnicastHostsFiles() {
         String unicastUris = nodes.stream().flatMap(node -> node.getAllTransportPortURI().stream()).collect(Collectors.joining("\n"));
         nodes.forEach(node -> {
@@ -262,9 +268,6 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
         writeUnicastHostsFiles();

         LOGGER.info("Starting to wait for cluster to form");
-        addWaitForUri(
-            "cluster health yellow", "/_cluster/health?wait_for_nodes=>=" + nodes.size() + "&wait_for_status=yellow"
-        );
         waitForConditions(waitConditions, startedAt, CLUSTER_UP_TIMEOUT, CLUSTER_UP_TIMEOUT_UNIT, this);
     }
@@ -279,7 +282,9 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
     }

     void eachVersionedDistribution(BiConsumer<String, Distribution> consumer) {
-        nodes.forEach(each -> consumer.accept(each.getVersion(), each.getDistribution()));
+        nodes.forEach(each -> {
+            consumer.accept(each.getVersion(), each.getDistribution());
+        });
     }

     public ElasticsearchNode singleNode() {
@@ -291,21 +296,25 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
         return getFirstNode();
     }

-    private void addWaitForUri(String description, String uri) {
-        waitConditions.put(description, (node) -> {
+    private void addWaitForClusterHealth() {
+        waitConditions.put("cluster health yellow", (node) -> {
             try {
-                URL url = new URL("http://" + getFirstNode().getHttpSocketURI() + uri);
-                HttpURLConnection con = (HttpURLConnection) url.openConnection();
-                con.setRequestMethod("GET");
-                con.setConnectTimeout(500);
-                con.setReadTimeout(500);
-                try (BufferedReader reader = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
-                    String response = reader.lines().collect(Collectors.joining("\n"));
-                    LOGGER.info("{} -> {} ->\n{}", this, uri, response);
+                WaitForHttpResource wait = new WaitForHttpResource(
+                    "http", getFirstNode().getHttpSocketURI(), nodes.size()
+                );
+                List<Map<String, String>> credentials = getFirstNode().getCredentials();
+                if (getFirstNode().getCredentials().isEmpty() == false) {
+                    wait.setUsername(credentials.get(0).get("useradd"));
+                    wait.setPassword(credentials.get(0).get("-p"));
                 }
-                return true;
+                return wait.wait(500);
             } catch (IOException e) {
                 throw new IllegalStateException("Connection attempt to " + this + " failed", e);
+            } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+                throw new TestClustersException("Interrupted while waiting for " + this, e);
+            } catch (GeneralSecurityException e) {
+                throw new RuntimeException("security exception", e);
             }
         });
     }

View File

@@ -38,6 +38,7 @@ import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -86,6 +87,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
     private final Map<String, Supplier<CharSequence>> environment = new LinkedHashMap<>();
     private final Map<String, File> extraConfigFiles = new HashMap<>();
     final LinkedHashMap<String, String> defaultConfig = new LinkedHashMap<>();
+    private final List<Map<String, String>> credentials = new ArrayList<>();

     private final Path confPathRepo;
     private final Path configFile;
@@ -117,8 +119,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
         esStdoutFile = confPathLogs.resolve("es.stdout.log");
         esStderrFile = confPathLogs.resolve("es.stderr.log");
         tmpDir = workingDir.resolve("tmp");
-        waitConditions.put("http ports file", node -> Files.exists(((ElasticsearchNode) node).httpPortsFile));
-        waitConditions.put("transport ports file", node -> Files.exists(((ElasticsearchNode) node).transportPortFile));
+        waitConditions.put("ports files", this::checkPortsFilesExistWithDelay);
     }

     public String getName() {
@@ -276,7 +277,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
         Path distroArtifact = artifactsExtractDir
             .resolve(distribution.getGroup())
-            .resolve(distribution.getArtifactName() + "-" + getVersion());
+            .resolve("elasticsearch-" + getVersion());

         if (Files.exists(distroArtifact) == false) {
             throw new TestClustersException("Can not start " + this + ", missing: " + distroArtifact);
@@ -319,9 +320,25 @@ public class ElasticsearchNode implements TestClusterConfiguration {
         copyExtraConfigFiles();

+        if (isSettingMissingOrTrue("xpack.security.enabled")) {
+            if (credentials.isEmpty()) {
+                user(Collections.emptyMap());
+            }
+            credentials.forEach(paramMap -> runElaticsearchBinScript(
+                "elasticsearch-users",
+                paramMap.entrySet().stream()
+                    .flatMap(entry -> Stream.of(entry.getKey(), entry.getValue()))
+                    .toArray(String[]::new)
+            ));
+        }
+
         startElasticsearchProcess();
     }

+    private boolean isSettingMissingOrTrue(String name) {
+        return Boolean.valueOf(settings.getOrDefault(name, () -> "false").get().toString());
+    }
+
     private void copyExtraConfigFiles() {
         extraConfigFiles.forEach((destination, from) -> {
             if (Files.exists(from.toPath()) == false) {
@@ -375,6 +392,22 @@ public class ElasticsearchNode implements TestClusterConfiguration {
         extraConfigFiles.put(destination, from);
     }

+    @Override
+    public void user(Map<String, String> userSpec) {
+        Set<String> keys = new HashSet<>(userSpec.keySet());
+        keys.remove("username");
+        keys.remove("password");
+        keys.remove("role");
+        if (keys.isEmpty() == false) {
+            throw new TestClustersException("Unknown keys in user definition " + keys + " for " + this);
+        }
+        Map<String, String> cred = new LinkedHashMap<>();
+        cred.put("useradd", userSpec.getOrDefault("username", "test_user"));
+        cred.put("-p", userSpec.getOrDefault("password", "x-pack-test-password"));
+        cred.put("-r", userSpec.getOrDefault("role", "superuser"));
+        credentials.add(cred);
+    }
+
     private void runElaticsearchBinScriptWithInput(String input, String tool, String... args) {
         try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
             services.loggedExec(spec -> {
@@ -752,4 +785,21 @@ public class ElasticsearchNode implements TestClusterConfiguration {
     public String toString() {
         return "node{" + path + ":" + name + "}";
     }
+
+    List<Map<String, String>> getCredentials() {
+        return credentials;
+    }
+
+    private boolean checkPortsFilesExistWithDelay(TestClusterConfiguration node) {
+        if (Files.exists(httpPortsFile) && Files.exists(transportPortFile)) {
+            return true;
+        }
+        try {
+            Thread.sleep(500);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new TestClustersException("Interrupted while waiting for ports files", e);
+        }
+        return Files.exists(httpPortsFile) && Files.exists(transportPortFile);
+    }
 }
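A usage sketch of the new user() entry point (cluster name is hypothetical): because user(Map) fills in defaults for any omitted key, a single DSL call is enough to create a superuser via the elasticsearch-users CLI once xpack.security.enabled is set:

    testClusters {
        secureCluster {
            setting 'xpack.security.enabled', 'true'
            user username: 'admin_user', password: 'admin-password'  // role defaults to "superuser"
            // user [:] would create the defaults: test_user / x-pack-test-password / superuser
        }
    }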

View File

@@ -27,6 +27,7 @@ import java.io.File;
 import java.net.URI;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Predicate;
 import java.util.function.Supplier;
@@ -72,6 +73,8 @@ public interface TestClusterConfiguration {
     void extraConfigFile(String destination, File from);

+    void user(Map<String, String> userSpec);
+
     String getHttpSocketURI();

     String getTransportPortURI();
@@ -108,7 +111,7 @@ public interface TestClusterConfiguration {
                     break;
                 }
             } catch (TestClustersException e) {
-                throw new TestClustersException(e);
+                throw e;
             } catch (Exception e) {
                 if (lastException == null) {
                     lastException = e;
@@ -116,12 +119,6 @@ public interface TestClusterConfiguration {
                     lastException = e;
                 }
             }
-            try {
-                Thread.sleep(500);
-            }
-            catch (InterruptedException e) {
-                Thread.currentThread().interrupt();
-            }
         }
         if (conditionMet == false) {
             String message = "`" + context + "` failed to wait for " + description + " after " +

View File

@@ -20,17 +20,18 @@ package org.elasticsearch.gradle.testclusters;
 import groovy.lang.Closure;
 import org.elasticsearch.gradle.BwcVersions;
-import org.elasticsearch.gradle.Distribution;
 import org.elasticsearch.gradle.Version;
+import org.elasticsearch.gradle.tool.Boilerplate;
 import org.gradle.api.Action;
 import org.gradle.api.NamedDomainObjectContainer;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
 import org.gradle.api.Task;
 import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.artifacts.repositories.MavenArtifactRepository;
+import org.gradle.api.credentials.HttpHeaderCredentials;
 import org.gradle.api.execution.TaskActionListener;
 import org.gradle.api.execution.TaskExecutionListener;
-import org.gradle.api.file.FileCollection;
 import org.gradle.api.file.FileTree;
 import org.gradle.api.logging.Logger;
 import org.gradle.api.logging.Logging;
@@ -46,7 +47,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
@@ -56,7 +56,7 @@ public class TestClustersPlugin implements Plugin<Project> {
     private static final String LIST_TASK_NAME = "listTestClusters";
     private static final String NODE_EXTENSION_NAME = "testClusters";
-    private static final String HELPER_CONFIGURATION_NAME = "testclusters";
+    private static final String HELPER_CONFIGURATION_PREFIX = "testclusters";
     private static final String SYNC_ARTIFACTS_TASK_NAME = "syncTestClustersArtifacts";
     private static final int EXECUTOR_SHUTDOWN_TIMEOUT = 1;
     private static final TimeUnit EXECUTOR_SHUTDOWN_TIMEOUT_UNIT = TimeUnit.MINUTES;
@@ -69,6 +69,10 @@ public class TestClustersPlugin implements Plugin<Project> {
     private final Thread shutdownHook = new Thread(this::shutDownAllClusters);
     private ExecutorService executorService = Executors.newSingleThreadExecutor();

+    public static String getHelperConfigurationName(String version) {
+        return HELPER_CONFIGURATION_PREFIX + "-" + version;
+    }
+
     @Override
     public void apply(Project project) {
         Project rootProject = project.getRootProject();
@@ -82,47 +86,6 @@ public class TestClustersPlugin implements Plugin<Project> {
         // create DSL for tasks to mark clusters these use
         createUseClusterTaskExtension(project, container);

-        if (rootProject.getConfigurations().findByName(HELPER_CONFIGURATION_NAME) == null) {
-            // We use a single configuration on the root project to resolve all testcluster dependencies ( like distros )
-            // at once, only once without the need to repeat it for each project. This pays off assuming that most
-            // projects use the same dependencies.
-            Configuration helperConfiguration = project.getRootProject().getConfigurations().create(HELPER_CONFIGURATION_NAME);
-            helperConfiguration.setDescription(
-                "Internal helper configuration used by cluster configuration to download " +
-                "ES distributions and plugins."
-            );
-
-            // We have a single task to sync the helper configuration to "artifacts dir"
-            // the clusters will look for artifacts there based on the naming conventions.
-            // Tasks that use a cluster will add this as a dependency automatically so it's guaranteed to run early in
-            // the build.
-            rootProject.getTasks().create(SYNC_ARTIFACTS_TASK_NAME, sync -> {
-                sync.getInputs().files((Callable<FileCollection>) helperConfiguration::getAsFileTree);
-                sync.getOutputs().dir(new File(project.getRootProject().getBuildDir(), "testclusters/extract"));
-                // NOTE: Gradle doesn't allow a lambda here ( fails at runtime )
-                sync.doLast(new Action<Task>() {
-                    @Override
-                    public void execute(Task task) {
-                        project.sync(spec ->
-                            helperConfiguration.getResolvedConfiguration().getResolvedArtifacts().forEach(resolvedArtifact -> {
-                                final FileTree files;
-                                File file = resolvedArtifact.getFile();
-                                if (file.getName().endsWith(".zip")) {
-                                    files = project.zipTree(file);
-                                } else if (file.getName().endsWith("tar.gz")) {
-                                    files = project.tarTree(file);
-                                } else {
-                                    throw new IllegalArgumentException("Can't extract " + file + " unknown file extension");
-                                }
-                                spec.from(files).into(new File(project.getRootProject().getBuildDir(), "testclusters/extract") + "/" +
-                                    resolvedArtifact.getModuleVersion().getId().getGroup()
-                                );
-                            }));
-                    }
-                });
-            });
-        }
-
         // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters
         // that are defined in the build script and the ones that will actually be used in this invocation of gradle
         // we use this information to determine when the last task that required the cluster executed so that we can
@@ -143,6 +106,10 @@ public class TestClustersPlugin implements Plugin<Project> {
         autoConfigureClusterDependencies(project, rootProject, container);
     }

+    private static File getExtractDir(Project project) {
+        return new File(project.getRootProject().getBuildDir(), "testclusters/extract/");
+    }
+
     private NamedDomainObjectContainer<ElasticsearchCluster> createTestClustersContainerExtension(Project project) {
         // Create an extensions that allows describing clusters
         NamedDomainObjectContainer<ElasticsearchCluster> container = project.container(
@@ -290,12 +257,59 @@ public class TestClustersPlugin implements Plugin<Project> {
         Project rootProject,
         NamedDomainObjectContainer<ElasticsearchCluster> container
     ) {
+        // Download integ test distribution from maven central
+        MavenArtifactRepository mavenCentral = project.getRepositories().mavenCentral();
+        mavenCentral.content(spec -> {
+            spec.includeGroupByRegex("org\\.elasticsearch\\.distribution\\..*");
+        });
+
+        // Other distributions from the download service
+        project.getRepositories().add(
+            project.getRepositories().ivy(spec -> {
+                spec.setUrl("https://artifacts.elastic.co/downloads");
+                spec.patternLayout(p -> p.artifact("elasticsearch/[module]-[revision](-[classifier]).[ext]"));
+                HttpHeaderCredentials headerConfig = spec.getCredentials(HttpHeaderCredentials.class);
+                headerConfig.setName("X-Elastic-No-KPI");
+                headerConfig.setValue("1");
+                spec.content(c -> c.includeGroupByRegex("org\\.elasticsearch\\.distribution\\..*"));
+            })
+        );
+
+        // We have a single task to sync the helper configuration to "artifacts dir"
+        // the clusters will look for artifacts there based on the naming conventions.
+        // Tasks that use a cluster will add this as a dependency automatically so it's guaranteed to run early in
+        // the build.
+        Task sync = Boilerplate.maybeCreate(rootProject.getTasks(), SYNC_ARTIFACTS_TASK_NAME, onCreate -> {
+            onCreate.getOutputs().dir(getExtractDir(rootProject));
+            // NOTE: Gradle doesn't allow a lambda here ( fails at runtime )
+            onCreate.doFirst(new Action<Task>() {
+                @Override
+                public void execute(Task task) {
+                    // Clean up the extract dir first to make sure we have no stale files from older
+                    // previous builds of the same distribution
+                    project.delete(getExtractDir(rootProject));
+                }
+            });
+        });
+
         // When the project evaluated we know of all tasks that use clusters.
         // Each of these have to depend on the artifacts being synced.
         // We need afterEvaluate here despite the fact that container is a domain object, we can't implement this with
         // all because fields can change after the fact.
         project.afterEvaluate(ip -> container.forEach(esCluster ->
             esCluster.eachVersionedDistribution((version, distribution) -> {
+                Configuration helperConfiguration = Boilerplate.maybeCreate(
+                    rootProject.getConfigurations(),
+                    getHelperConfigurationName(version),
+                    onCreate ->
+                        // We use a single configuration on the root project to resolve all testcluster dependencies ( like distros )
+                        // at once, only once without the need to repeat it for each project. This pays off assuming that most
+                        // projects use the same dependencies.
+                        onCreate.setDescription(
+                            "Internal helper configuration used by cluster configuration to download " +
+                                "ES distributions and plugins for " + version
+                        )
+                );
                 BwcVersions.UnreleasedVersionInfo unreleasedInfo;
                 final List<Version> unreleased;
                 {
@@ -320,29 +334,42 @@ public class TestClustersPlugin implements Plugin<Project> {
                     projectNotation.put("path", unreleasedInfo.gradleProjectPath);
                     projectNotation.put("configuration", distribution.getLiveConfiguration());
                     rootProject.getDependencies().add(
-                        HELPER_CONFIGURATION_NAME,
+                        helperConfiguration.getName(),
                         project.getDependencies().project(projectNotation)
                     );
                 } else {
-                    if (distribution.equals(Distribution.INTEG_TEST)) {
-                        rootProject.getDependencies().add(
-                            HELPER_CONFIGURATION_NAME, "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + version
-                        );
-                    } else {
-                        // declare dependencies to be downloaded from the download service.
-                        // The BuildPlugin sets up the right repo for this to work
-                        // TODO: move the repo definition in this plugin when ClusterFormationTasks is removed
-                        String dependency = String.format(
-                            "%s:%s:%s:%s@%s",
-                            distribution.getGroup(),
-                            distribution.getArtifactName(),
-                            version,
-                            distribution.getClassifier(),
-                            distribution.getFileExtension()
-                        );
-                        rootProject.getDependencies().add(HELPER_CONFIGURATION_NAME, dependency);
-                    }
+                    rootProject.getDependencies().add(
+                        helperConfiguration.getName(),
+                        distribution.getGroup() + ":" +
+                            distribution.getArtifactName() + ":" +
+                            version +
+                            (distribution.getClassifier().isEmpty() ? "" : ":" + distribution.getClassifier()) + "@" +
+                            distribution.getFileExtension());
                 }
+                sync.getInputs().files(helperConfiguration);
+                // NOTE: Gradle doesn't allow a lambda here ( fails at runtime )
+                sync.doLast(new Action<Task>() {
+                    @Override
+                    public void execute(Task task) {
+                        project.copy(spec ->
+                            helperConfiguration.getResolvedConfiguration().getResolvedArtifacts().forEach(resolvedArtifact -> {
+                                final FileTree files;
+                                File file = resolvedArtifact.getFile();
+                                if (file.getName().endsWith(".zip")) {
+                                    files = project.zipTree(file);
+                                } else if (file.getName().endsWith("tar.gz")) {
+                                    files = project.tarTree(file);
+                                } else {
+                                    throw new IllegalArgumentException("Can't extract " + file + " unknown file extension");
+                                }
+                                spec.from(files, s -> s.into(resolvedArtifact.getModuleVersion().getId().getGroup()));
+                                spec.into(getExtractDir(project));
+                            }));
+                    }
+                });
             })));
     }
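Illustrative effect of the change (cluster names are hypothetical): clusters pinned to different versions now resolve through separate per-version root-project configurations instead of the single shared "testclusters" configuration removed above:

    testClusters {
        oldCluster { version = '7.0.0' }  // resolved via configuration "testclusters-7.0.0"
        newCluster { version = '7.1.0' }  // resolved via configuration "testclusters-7.1.0"
    }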
} }

View File

@@ -18,14 +18,33 @@
  */
 package org.elasticsearch.gradle.tool;

+import org.gradle.api.Action;
+import org.gradle.api.NamedDomainObjectContainer;
 import org.gradle.api.Project;
 import org.gradle.api.plugins.JavaPluginConvention;
 import org.gradle.api.tasks.SourceSetContainer;

+import java.util.Optional;
+
 public abstract class Boilerplate {

     public static SourceSetContainer getJavaSourceSets(Project project) {
         return project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets();
     }

+    public static <T> T maybeCreate(NamedDomainObjectContainer<T> collection, String name) {
+        return Optional.ofNullable(collection.findByName(name))
+            // orElseGet keeps create() lazy, so it only runs when the element is actually missing
+            .orElseGet(() -> collection.create(name));
+    }
+
+    public static <T> T maybeCreate(NamedDomainObjectContainer<T> collection, String name, Action<T> action) {
+        return Optional.ofNullable(collection.findByName(name))
+            .orElseGet(() -> {
+                T result = collection.create(name);
+                action.execute(result);
+                return result;
+            });
+    }
+
 }
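A short usage sketch (configuration name is illustrative): repeated calls are idempotent, returning the existing element instead of failing on a duplicate create:

    Configuration first = Boilerplate.maybeCreate(project.configurations, 'myHelper')
    Configuration second = Boilerplate.maybeCreate(project.configurations, 'myHelper')
    assert first.is(second)  // second call finds, does not create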

View File

@@ -0,0 +1 @@
implementation-class=org.elasticsearch.gradle.JdkDownloadPlugin

View File

@@ -99,6 +99,7 @@ public class BuildExamplePluginsIT extends GradleIntegrationTestCase {
             "buildscript {\n" +
             "    repositories {\n" +
             "        maven {\n" +
+            "            name = \"test\"\n" +
             "            url = '" + getLocalTestRepoPath() + "'\n" +
             "        }\n" +
             "    }\n" +
@@ -117,12 +118,14 @@ public class BuildExamplePluginsIT extends GradleIntegrationTestCase {
         String luceneSnapshotRevision = System.getProperty("test.lucene-snapshot-revision");
         if (luceneSnapshotRepo != null) {
             luceneSnapshotRepo = "  maven {\n" +
-                "    url \"http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision + "\"\n" +
+                "    name \"lucene-snapshots\"\n" +
+                "    url \"https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision + "\"\n" +
                 "  }\n";
         }
         writeBuildScript("\n" +
             "repositories {\n" +
             "  maven {\n" +
+            "    name \"test\"\n" +
             "    url \"" + getLocalTestRepoPath() + "\"\n" +
             "  }\n" +
             "  flatDir {\n" +

View File

@@ -0,0 +1,110 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import com.github.tomakehurst.wiremock.WireMockServer;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.client.WireMock.get;
import static com.github.tomakehurst.wiremock.client.WireMock.head;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
import static org.hamcrest.CoreMatchers.equalTo;

public class JdkDownloadPluginIT extends GradleIntegrationTestCase {

    private static final String FAKE_JDK_VERSION = "1.0.2+99";
    private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)");
    private static final Pattern NUM_CONFIGS_LOGLINE = Pattern.compile("NUM CONFIGS: (.*)");

    public void testLinuxExtraction() throws IOException {
        assertExtraction("getLinuxJdk", "linux", "bin/java");
    }

    public void testDarwinExtraction() throws IOException {
        assertExtraction("getDarwinJdk", "osx", "Contents/Home/bin/java");
    }

    public void testWindowsExtraction() throws IOException {
        assertExtraction("getWindowsJdk", "windows", "bin/java");
    }

    public void testCrossProjectReuse() throws IOException {
        runBuild("numConfigurations", "linux", result -> {
            Matcher matcher = NUM_CONFIGS_LOGLINE.matcher(result.getOutput());
            assertTrue("could not find num configs in output: " + result.getOutput(), matcher.find());
            assertThat(Integer.parseInt(matcher.group(1)), equalTo(6)); // 3 import configs, 3 export configs
        });
    }

    public void assertExtraction(String taskname, String platform, String javaBin) throws IOException {
        runBuild(taskname, platform, result -> {
            Matcher matcher = JDK_HOME_LOGLINE.matcher(result.getOutput());
            assertTrue("could not find jdk home in output: " + result.getOutput(), matcher.find());
            String jdkHome = matcher.group(1);
            Path javaPath = Paths.get(jdkHome, javaBin);
            assertTrue(javaPath.toString(), Files.exists(javaPath));
        });
    }

    private void runBuild(String taskname, String platform, Consumer<BuildResult> assertions) throws IOException {
        WireMockServer wireMock = new WireMockServer(0);
        try {
            String extension = platform.equals("windows") ? "zip" : "tar.gz";
            String filename = "openjdk-1.0.2_" + platform + "-x64_bin." + extension;
            wireMock.stubFor(head(urlEqualTo("/java/GA/jdk1/99/GPL/" + filename))
                .willReturn(aResponse().withStatus(200)));
            final byte[] filebytes;
            try (InputStream stream = JdkDownloadPluginIT.class.getResourceAsStream(filename)) {
                filebytes = stream.readAllBytes();
            }
            wireMock.stubFor(get(urlEqualTo("/java/GA/jdk1/99/GPL/" + filename))
                .willReturn(aResponse().withStatus(200).withBody(filebytes)));
            wireMock.start();

            GradleRunner runner = GradleRunner.create().withProjectDir(getProjectDir("jdk-download"))
                .withArguments(taskname,
                    "-Dlocal.repo.path=" + getLocalTestRepoPath(),
                    "-Dtests.jdk_version=" + FAKE_JDK_VERSION,
                    "-Dtests.jdk_repo=" + wireMock.baseUrl())
                .withPluginClasspath();

            BuildResult result = runner.build();
            assertions.accept(result);
        } catch (Exception e) {
            // for debugging
            System.err.println("missed requests: " + wireMock.findUnmatchedRequests().getRequests());
            throw e;
        } finally {
            wireMock.stop();
        }
    }
}

View File

@@ -0,0 +1,78 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import org.elasticsearch.gradle.test.GradleUnitTestCase;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
import org.gradle.testfixtures.ProjectBuilder;
import org.junit.BeforeClass;

import static org.hamcrest.CoreMatchers.equalTo;

public class JdkDownloadPluginTests extends GradleUnitTestCase {
    private static Project rootProject;

    @BeforeClass
    public static void setupRoot() {
        rootProject = ProjectBuilder.builder().build();
    }

    public void testMissingVersion() {
        assertJdkError(createProject(), "testjdk", null, "linux", "version not specified for jdk [testjdk]");
    }

    public void testMissingPlatform() {
        assertJdkError(createProject(), "testjdk", "11.0.2+33", null, "platform not specified for jdk [testjdk]");
    }

    public void testUnknownPlatform() {
        assertJdkError(createProject(), "testjdk", "11.0.2+33", "unknown",
            "unknown platform [unknown] for jdk [testjdk], must be one of [linux, windows, darwin]");
    }

    public void testBadVersionFormat() {
        assertJdkError(createProject(), "testjdk", "badversion", "linux", "malformed version [badversion] for jdk [testjdk]");
    }

    private void assertJdkError(Project project, String name, String version, String platform, String message) {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createJdk(project, name, version, platform));
        assertThat(e.getMessage(), equalTo(message));
    }

    private void createJdk(Project project, String name, String version, String platform) {
        @SuppressWarnings("unchecked")
        NamedDomainObjectContainer<Jdk> jdks = (NamedDomainObjectContainer<Jdk>) project.getExtensions().getByName("jdks");
        jdks.create(name, jdk -> {
            if (version != null) {
                jdk.setVersion(version);
            }
            if (platform != null) {
                jdk.setPlatform(platform);
            }
        }).finalizeValues();
    }

    private Project createProject() {
        Project project = ProjectBuilder.builder().withParent(rootProject).build();
        project.getPlugins().apply("elasticsearch.jdk-download");
        return project;
    }
}

View File

@@ -22,6 +22,7 @@ import com.carrotsearch.randomizedtesting.JUnit4MethodProvider;
 import com.carrotsearch.randomizedtesting.RandomizedRunner;
 import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
+import junit.framework.AssertionFailedError;

 import org.junit.Assert;
 import org.junit.runner.RunWith;
@@ -32,4 +33,24 @@ import org.junit.runner.RunWith;
 })
 @ThreadLeakLingering(linger = 5000) // wait for "Connection worker" to die
 public abstract class BaseTestCase extends Assert {
+
+    // add expectThrows from junit 5
+    @FunctionalInterface
+    public interface ThrowingRunnable {
+        void run() throws Throwable;
+    }
+
+    public static <T extends Throwable> T expectThrows(Class<T> expectedType, ThrowingRunnable runnable) {
+        try {
+            runnable.run();
+        } catch (Throwable e) {
+            if (expectedType.isInstance(e)) {
+                return expectedType.cast(e);
+            }
+            AssertionFailedError assertion =
+                new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName() + " but got " + e);
+            assertion.initCause(e);
+            throw assertion;
+        }
+        throw new AssertionFailedError("Expected exception " + expectedType.getSimpleName() + " but no exception was thrown");
+    }
 }
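Usage sketch, mirroring JdkDownloadPluginTests above (assumes a Jdk instance named jdk configured with name "testjdk"): the helper returns the caught exception so its message can be asserted, and fails the test if nothing is thrown:

    IllegalArgumentException e = expectThrows(IllegalArgumentException) { jdk.setVersion('badversion') }
    assert e.message == 'malformed version [badversion] for jdk [testjdk]'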

View File

@@ -103,6 +103,14 @@ public class TestClustersPluginIT extends GradleIntegrationTestCase {
         );
     }

+    public void testReleased() {
+        BuildResult result = getTestClustersRunner("testReleased").build();
+        assertTaskSuccessful(result, ":testReleased");
+        assertStartedAndStoppedOnce(result, "releasedVersionDefault-1");
+        assertStartedAndStoppedOnce(result, "releasedVersionOSS-1");
+        assertStartedAndStoppedOnce(result, "releasedVersionIntegTest-1");
+    }
+
     public void testIncremental() {
         BuildResult result = getTestClustersRunner("clean", ":user1").build();
         assertTaskSuccessful(result, ":user1");

View File

@@ -16,6 +16,7 @@ repositories {
   jcenter()
   repositories {
     maven {
+      name "local-repo"
      url System.getProperty("local.repo.path")
    }
  }

View File

@@ -15,6 +15,7 @@ repositories {
   jcenter()
   repositories {
     maven {
+      name "local"
      url System.getProperty("local.repo.path")
    }
  }

View File

@@ -0,0 +1,15 @@
project.gradle.projectsEvaluated {
// wire the jdk repo to wiremock
String fakeJdkRepo = Objects.requireNonNull(System.getProperty('tests.jdk_repo'))
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
println rootProject.repositories.asMap.keySet()
IvyArtifactRepository repository = (IvyArtifactRepository) rootProject.repositories.getByName("jdk_repo_${fakeJdkVersion}")
repository.setUrl(fakeJdkRepo)
}
task numConfigurations {
doLast {
println "NUM CONFIGS: ${project.configurations.size()}"
}
}

View File

@@ -0,0 +1,9 @@
evaluationDependsOn ':subproj'
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
jdks {
linux_jdk {
version = fakeJdkVersion
platform = "linux"
}
}

View File

@@ -0,0 +1 @@
include 'subproj'

View File

@@ -0,0 +1,41 @@
plugins {
id 'elasticsearch.jdk-download'
}
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
jdks {
linux {
version = fakeJdkVersion
platform = "linux"
}
darwin {
version = fakeJdkVersion
platform = "darwin"
}
windows {
version = fakeJdkVersion
platform = "windows"
}
}
task getLinuxJdk {
dependsOn jdks.linux
doLast {
println "JDK HOME: " + jdks.linux
}
}
task getDarwinJdk {
dependsOn jdks.darwin
doLast {
println "JDK HOME: " + jdks.darwin
}
}
task getWindowsJdk {
dependsOn jdks.windows
doLast {
println "JDK HOME: " + jdks.windows
}
}

View File

@@ -9,16 +9,16 @@ allprojects { all ->
       dir System.getProperty("test.local-test-downloads-path")
     }
     maven {
+      name "local"
       url System.getProperty("local.repo.path")
     }
     String luceneSnapshotRevision = System.getProperty("test.lucene-snapshot-revision")
     if (luceneSnapshotRevision != null) {
       maven {
-        url "http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision
+        name "lucene-snapshots"
+        url "https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision
       }
     }
-    jcenter()
   }
   if (project == rootProject || project.name == "alpha" || project.name == "bravo") {
@@ -58,6 +58,21 @@ testClusters {
     javaHome = file(System.getProperty('java.home'))
     numberOfNodes = 3
   }
+  releasedVersionDefault {
+    version = "7.0.0"
+    distribution = 'DEFAULT'
+    javaHome = file(System.getProperty('java.home'))
+  }
+  releasedVersionOSS {
+    version = "7.0.0"
+    distribution = 'OSS'
+    javaHome = file(System.getProperty('java.home'))
+  }
+  releasedVersionIntegTest {
+    version = "7.0.0"
+    distribution = 'INTEG_TEST'
+    javaHome = file(System.getProperty('java.home'))
+  }
 }
 
 task multiNode {
@@ -67,6 +82,17 @@ task multiNode {
   }
 }
 
+task testReleased {
+  useCluster testClusters.releasedVersionDefault
+  useCluster testClusters.releasedVersionOSS
+  useCluster testClusters.releasedVersionIntegTest
+  doFirst {
+    println "$path: Cluster running @ ${testClusters.releasedVersionDefault.httpSocketURI}"
+    println "$path: Cluster running @ ${testClusters.releasedVersionOSS.httpSocketURI}"
+    println "$path: Cluster running @ ${testClusters.releasedVersionIntegTest.httpSocketURI}"
+  }
+}
+
 task printLog {
   useCluster testClusters.myTestCluster
   doFirst {

View File

@@ -14,6 +14,7 @@ repositories {
    * - version 0.0.2 has the same class and one extra file just to make the jar different
    */
   maven {
+    name = "local-test"
     url = file("sample_jars/build/testrepo")
   }
   jcenter()

View File

@@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
 import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
 import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest;
@@ -57,11 +58,11 @@ final class DataFrameRequestConverters {
             .addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getId()))
             .build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        if (getRequest.getFrom() != null) {
-            request.addParameter("from", getRequest.getFrom().toString());
+        if (getRequest.getPageParams() != null && getRequest.getPageParams().getFrom() != null) {
+            request.addParameter(PageParams.FROM.getPreferredName(), getRequest.getPageParams().getFrom().toString());
         }
-        if (getRequest.getSize() != null) {
-            request.addParameter("size", getRequest.getSize().toString());
+        if (getRequest.getPageParams() != null && getRequest.getPageParams().getSize() != null) {
+            request.addParameter(PageParams.SIZE.getPreferredName(), getRequest.getPageParams().getSize().toString());
         }
         return request;
     }
@@ -120,6 +121,13 @@ final class DataFrameRequestConverters {
             .addPathPart(statsRequest.getId())
             .addPathPartAsIs("_stats")
             .build();
-        return new Request(HttpGet.METHOD_NAME, endpoint);
+        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+        if (statsRequest.getPageParams() != null && statsRequest.getPageParams().getFrom() != null) {
+            request.addParameter(PageParams.FROM.getPreferredName(), statsRequest.getPageParams().getFrom().toString());
+        }
+        if (statsRequest.getPageParams() != null && statsRequest.getPageParams().getSize() != null) {
+            request.addParameter(PageParams.SIZE.getPreferredName(), statsRequest.getPageParams().getSize().toString());
+        }
+        return request;
     }
 }
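Note: both converters now guard every paging parameter behind a null check, because PageParams allows either field to be unset. The same pattern, distilled into a standalone helper for illustration (the helper class and method name are hypothetical; Request is the low-level client's request class, and PageParams.FROM/PageParams.SIZE are the ParseField constants used above):

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.core.PageParams;

    final class PagingParams {
        // hypothetical helper: add from/size query parameters only when explicitly set
        static void addTo(Request request, PageParams pageParams) {
            if (pageParams != null && pageParams.getFrom() != null) {
                request.addParameter(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString());
            }
            if (pageParams != null && pageParams.getSize() != null) {
                request.addParameter(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString());
            }
        }
    }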

View File

@@ -27,6 +27,7 @@ import org.apache.http.client.methods.HttpPut;
 import org.apache.http.nio.entity.NByteArrayEntity;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.client.RequestConverters.EndpointBuilder;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.CloseJobRequest;
 import org.elasticsearch.client.ml.DeleteCalendarEventRequest;
 import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
@@ -71,7 +72,6 @@ import org.elasticsearch.client.ml.UpdateDatafeedRequest;
 import org.elasticsearch.client.ml.UpdateFilterRequest;
 import org.elasticsearch.client.ml.UpdateJobRequest;
 import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;

View File

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.client.ml.job.util;
+package org.elasticsearch.client.core;
 
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
@@ -57,11 +57,11 @@ public class PageParams implements ToXContentObject {
         this.size = size;
     }
 
-    public int getFrom() {
+    public Integer getFrom() {
         return from;
     }
 
-    public int getSize() {
+    public Integer getSize() {
         return size;
     }
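Note: widening the getters from int to Integer is what lets callers set from and size independently; an unset value now surfaces as null instead of being coerced to a primitive. A small illustration (assuming the two-argument constructor exercised by the tests in this change):

    import org.elasticsearch.client.core.PageParams;

    public class PageParamsExample {
        public static void main(String[] args) {
            PageParams fromOnly = new PageParams(100, null); // skip the first 100 results, keep the server-default size
            PageParams sizeOnly = new PageParams(null, 50);  // start at the beginning, return at most 50
            assert fromOnly.getSize() == null;               // unset values are null, not 0
            assert sizeOnly.getFrom() == null;
        }
    }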

View File

@@ -21,6 +21,7 @@ package org.elasticsearch.client.dataframe;
 
 import org.elasticsearch.client.Validatable;
 import org.elasticsearch.client.ValidationException;
+import org.elasticsearch.client.core.PageParams;
 
 import java.util.Arrays;
 import java.util.List;
@@ -29,10 +30,6 @@ import java.util.Optional;
 
 public class GetDataFrameTransformRequest implements Validatable {
 
-    private final List<String> ids;
-    private Integer from;
-    private Integer size;
-
     /**
      * Helper method to create a request that will get ALL Data Frame Transforms
      * @return new {@link GetDataFrameTransformRequest} object for the id "_all"
@@ -41,6 +38,9 @@ public class GetDataFrameTransformRequest implements Validatable {
         return new GetDataFrameTransformRequest("_all");
     }
 
+    private final List<String> ids;
+    private PageParams pageParams;
+
     public GetDataFrameTransformRequest(String... ids) {
         this.ids = Arrays.asList(ids);
     }
@@ -49,20 +49,12 @@ public class GetDataFrameTransformRequest implements Validatable {
         return ids;
     }
 
-    public Integer getFrom() {
-        return from;
-    }
-
-    public void setFrom(Integer from) {
-        this.from = from;
-    }
-
-    public Integer getSize() {
-        return size;
-    }
-
-    public void setSize(Integer size) {
-        this.size = size;
-    }
+    public PageParams getPageParams() {
+        return pageParams;
+    }
+
+    public void setPageParams(PageParams pageParams) {
+        this.pageParams = pageParams;
+    }
 
     @Override
@@ -78,7 +70,7 @@ public class GetDataFrameTransformRequest implements Validatable {
     @Override
     public int hashCode() {
-        return Objects.hash(ids);
+        return Objects.hash(ids, pageParams);
     }
 
     @Override
@@ -91,6 +83,6 @@ public class GetDataFrameTransformRequest implements Validatable {
             return false;
         }
         GetDataFrameTransformRequest other = (GetDataFrameTransformRequest) obj;
-        return Objects.equals(ids, other.ids);
+        return Objects.equals(ids, other.ids) && Objects.equals(pageParams, other.pageParams);
     }
 }
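Note: paging now travels on the request as one object instead of two loose fields. A short usage sketch (the "_all" id comes from the helper above; the page values are illustrative):

    import org.elasticsearch.client.core.PageParams;
    import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;

    public class GetTransformPagingExample {
        public static void main(String[] args) {
            GetDataFrameTransformRequest request = new GetDataFrameTransformRequest("_all");
            request.setPageParams(new PageParams(20, 20)); // skip the first 20 transforms, return at most 20
        }
    }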

View File

@@ -21,12 +21,14 @@ package org.elasticsearch.client.dataframe;
 
 import org.elasticsearch.client.Validatable;
 import org.elasticsearch.client.ValidationException;
+import org.elasticsearch.client.core.PageParams;
 
 import java.util.Objects;
 import java.util.Optional;
 
 public class GetDataFrameTransformStatsRequest implements Validatable {
     private final String id;
+    private PageParams pageParams;
 
     public GetDataFrameTransformStatsRequest(String id) {
         this.id = id;
@@ -36,6 +38,14 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
         return id;
     }
 
+    public PageParams getPageParams() {
+        return pageParams;
+    }
+
+    public void setPageParams(PageParams pageParams) {
+        this.pageParams = pageParams;
+    }
+
     @Override
     public Optional<ValidationException> validate() {
         if (id == null) {
@@ -49,7 +59,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
     @Override
     public int hashCode() {
-        return Objects.hash(id);
+        return Objects.hash(id, pageParams);
     }
 
     @Override
@@ -62,6 +72,6 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
             return false;
         }
         GetDataFrameTransformStatsRequest other = (GetDataFrameTransformStatsRequest) obj;
-        return Objects.equals(id, other.id);
+        return Objects.equals(id, other.id) && Objects.equals(pageParams, other.pageParams);
     }
 }
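Note: the stats request gains the same paging surface, and pageParams now participates in equals/hashCode on both request types. For example (the transform id here is hypothetical):

    import org.elasticsearch.client.core.PageParams;
    import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest;

    public class GetTransformStatsPagingExample {
        public static void main(String[] args) {
            GetDataFrameTransformStatsRequest request = new GetDataFrameTransformStatsRequest("reviews-transform");
            request.setPageParams(new PageParams(null, 100)); // cap the response at 100 stats entries
        }
    }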

View File

@@ -74,7 +74,4 @@ public class QueryConfig implements ToXContentObject {
         return Objects.equals(this.query, that.query);
     }
 
-    public boolean isValid() {
-        return this.query != null;
-    }
 }

View File

@@ -74,7 +74,4 @@ public class AggregationConfig implements ToXContentObject {
         return Objects.equals(this.aggregations, that.aggregations);
     }
 
-    public boolean isValid() {
-        return this.aggregations != null;
-    }
 }

View File

@@ -138,10 +138,6 @@ public class GroupConfig implements ToXContentObject {
         return groups;
     }
 
-    public boolean isValid() {
-        return this.groups != null;
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();

View File

@@ -97,10 +97,6 @@ public class PivotConfig implements ToXContentObject {
         return Objects.hash(groups, aggregationConfig);
     }
 
-    public boolean isValid() {
-        return groups.isValid() && aggregationConfig.isValid();
-    }
-
     public static Builder builder() {
         return new Builder();
     }

View File

@@ -20,9 +20,9 @@ package org.elasticsearch.client.ml;
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.job.config.Job;
 import org.elasticsearch.client.ml.job.results.Result;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;

View File

@@ -21,9 +21,9 @@ package org.elasticsearch.client.ml;
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.calendars.Calendar;
 import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;

View File

@@ -21,13 +21,12 @@ package org.elasticsearch.client.ml;
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.calendars.Calendar;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.client.ml.job.util.PageParams;
 
 import java.io.IOException;
 import java.util.Objects;

View File

@@ -20,8 +20,8 @@ package org.elasticsearch.client.ml;
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;

View File

@@ -20,8 +20,8 @@ package org.elasticsearch.client.ml;
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.job.config.MlFilter;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;

View File

@@ -20,8 +20,8 @@ package org.elasticsearch.client.ml;
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;

View File

@@ -20,8 +20,8 @@ package org.elasticsearch.client.ml;
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;

View File

@@ -19,8 +19,8 @@
 package org.elasticsearch.client.ml;
 
 import org.elasticsearch.client.Validatable;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;

View File

@@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
 import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
 import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest;
@@ -43,7 +44,9 @@ import org.elasticsearch.test.ESTestCase;
 import java.io.IOException;
 import java.util.Collections;
 
+import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasEntry;
 
 public class DataFrameRequestConvertersTests extends ESTestCase {
@@ -147,6 +150,23 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
         assertEquals(HttpGet.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo/_stats"));
+
+        assertFalse(request.getParameters().containsKey("from"));
+        assertFalse(request.getParameters().containsKey("size"));
+
+        getStatsRequest.setPageParams(new PageParams(0, null));
+        request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
+        assertThat(request.getParameters(), hasEntry("from", "0"));
+        assertEquals(null, request.getParameters().get("size"));
+
+        getStatsRequest.setPageParams(new PageParams(null, 50));
+        request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
+        assertEquals(null, request.getParameters().get("from"));
+        assertThat(request.getParameters(), hasEntry("size", "50"));
+
+        getStatsRequest.setPageParams(new PageParams(0, 10));
+        request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
+        assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
     }
 
     public void testGetDataFrameTransform() {
@@ -159,11 +179,19 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
         assertFalse(request.getParameters().containsKey("from"));
         assertFalse(request.getParameters().containsKey("size"));
 
-        getRequest.setFrom(0);
-        getRequest.setSize(10);
+        getRequest.setPageParams(new PageParams(0, null));
         request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
-        assertEquals("0", request.getParameters().get("from"));
-        assertEquals("10", request.getParameters().get("size"));
+        assertThat(request.getParameters(), hasEntry("from", "0"));
+        assertEquals(null, request.getParameters().get("size"));
+
+        getRequest.setPageParams(new PageParams(null, 50));
+        request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
+        assertEquals(null, request.getParameters().get("from"));
+        assertThat(request.getParameters(), hasEntry("size", "50"));
+
+        getRequest.setPageParams(new PageParams(0, 10));
+        request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
+        assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
     }
 
     public void testGetDataFrameTransform_givenMulitpleIds() {

View File

@@ -26,6 +26,7 @@ import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.client.core.AcknowledgedResponse;
 import org.elasticsearch.client.core.IndexerState;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
 import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
 import org.elasticsearch.client.dataframe.GetDataFrameTransformResponse;
@@ -217,8 +218,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
         assertThat(getResponse.getTransformConfigurations(), hasSize(2));
         assertEquals(transform, getResponse.getTransformConfigurations().get(1));
 
-        getRequest.setFrom(0);
-        getRequest.setSize(1);
+        getRequest.setPageParams(new PageParams(0,1));
         getResponse = execute(getRequest, client::getDataFrameTransform,
             client::getDataFrameTransformAsync);
         assertNull(getResponse.getInvalidTransforms());

View File

@@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.CloseJobRequest;
 import org.elasticsearch.client.ml.DeleteCalendarEventRequest;
 import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
@@ -82,7 +83,6 @@ import org.elasticsearch.client.ml.job.config.JobUpdate;
 import org.elasticsearch.client.ml.job.config.JobUpdateTests;
 import org.elasticsearch.client.ml.job.config.MlFilter;
 import org.elasticsearch.client.ml.job.config.MlFilterTests;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;

View File

@@ -21,6 +21,7 @@ package org.elasticsearch.client;
 
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.ml.GetBucketsRequest;
 import org.elasticsearch.client.ml.GetBucketsResponse;
 import org.elasticsearch.client.ml.GetCategoriesRequest;
@@ -43,7 +44,6 @@ import org.elasticsearch.client.ml.job.results.AnomalyRecord;
 import org.elasticsearch.client.ml.job.results.Bucket;
 import org.elasticsearch.client.ml.job.results.Influencer;
 import org.elasticsearch.client.ml.job.results.OverallBucket;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.junit.After;

View File

@@ -27,6 +27,7 @@ import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.indices.CreateIndexRequest;
 import org.elasticsearch.client.ml.CloseJobRequest;
 import org.elasticsearch.client.ml.CloseJobResponse;
@@ -112,7 +113,6 @@ import org.elasticsearch.client.ml.job.config.JobUpdate;
 import org.elasticsearch.client.ml.job.config.MlFilter;
 import org.elasticsearch.client.ml.job.process.ModelSnapshot;
 import org.elasticsearch.client.ml.job.stats.JobStats;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;

View File

@@ -26,6 +26,7 @@ import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.RestHighLevelClient;
 import org.elasticsearch.client.core.AcknowledgedResponse;
 import org.elasticsearch.client.core.IndexerState;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
 import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
 import org.elasticsearch.client.dataframe.GetDataFrameTransformResponse;
@@ -585,8 +586,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         // end::get-data-frame-transform-request
 
         // tag::get-data-frame-transform-request-options
-        request.setFrom(0); // <1>
-        request.setSize(100); // <2>
+        request.setPageParams(new PageParams(0, 100)); // <1>
         // end::get-data-frame-transform-request-options
 
         // tag::get-data-frame-transform-execute

View File

@@ -32,6 +32,7 @@ import org.elasticsearch.client.MachineLearningIT;
 import org.elasticsearch.client.MlTestStateCleaner;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.indices.CreateIndexRequest;
 import org.elasticsearch.client.ml.CloseJobRequest;
 import org.elasticsearch.client.ml.CloseJobResponse;
@@ -137,7 +138,6 @@ import org.elasticsearch.client.ml.job.results.CategoryDefinition;
 import org.elasticsearch.client.ml.job.results.Influencer;
 import org.elasticsearch.client.ml.job.results.OverallBucket;
 import org.elasticsearch.client.ml.job.stats.JobStats;
-import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentFactory;

View File

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.client.ml;
 
-import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractXContentTestCase;

View File

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.client.ml;
 
-import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractXContentTestCase;

View File

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.client.ml;
 
-import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractXContentTestCase;

View File

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.client.ml;
 
-import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractXContentTestCase;

View File

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.client.ml;
 
-import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractXContentTestCase;

View File

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.client.ml;
 
-import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractXContentTestCase;

View File

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.client.ml;
 
-import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractXContentTestCase;

View File

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.client.ml.util;
 
-import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractXContentTestCase;

View File

@@ -61,7 +61,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla
   }
   if (jdk) {
     into('jdk') {
-      with jdkFiles(platform)
+      with jdkFiles(project, platform)
    }
  }
  into('') {
@@ -295,6 +295,10 @@ subprojects {
  }
 }
 
+subprojects {
+  group = "org.elasticsearch.distribution.${name.startsWith("oss-") ? "oss" : "default"}"
+}
+
 /*****************************************************************************
  *                          Rest test config                                 *
  *****************************************************************************/
@@ -302,6 +306,8 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) {
   apply plugin: 'elasticsearch.standalone-rest-test'
   apply plugin: 'elasticsearch.rest-test'
 
+  group = "org.elasticsearch.distribution.integ-test-zip"
+
   integTest {
     includePackaged = true
   }
@@ -321,23 +327,14 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) {
     inputs.properties(project(':distribution').restTestExpansions)
     MavenFilteringHack.filter(it, project(':distribution').restTestExpansions)
   }
-}
 
-/*****************************************************************************
- *                              Maven config                                 *
- *****************************************************************************/
-configure(subprojects.findAll { it.name.contains('zip') }) {
-  // only zip distributions go to maven
+  // The integ-test-distribution is published to maven
   BuildPlugin.configurePomGeneration(project)
   apply plugin: 'nebula.info-scm'
   apply plugin: 'nebula.maven-base-publish'
   apply plugin: 'nebula.maven-scm'
 
-  // note: the group must be correct before applying the nexus plugin, or
-  // it will capture the wrong value...
-  String subgroup = project.name == 'integ-test-zip' ? 'integ-test-zip' : 'zip'
-  project.group = "org.elasticsearch.distribution.${subgroup}"
-
   // make the pom file name use elasticsearch instead of the project name
   archivesBaseName = "elasticsearch${it.name.contains('oss') ? '-oss' : ''}"
@@ -378,3 +375,4 @@ configure(subprojects.findAll { it.name.contains('zip') }) {
     }
   }
 }

View File

@@ -17,18 +17,16 @@
  * under the License.
  */
 
+import org.apache.tools.ant.filters.FixCrLfFilter
 import org.elasticsearch.gradle.ConcatFilesTask
 import org.elasticsearch.gradle.MavenFilteringHack
 import org.elasticsearch.gradle.NoticeTask
 import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.test.RunTask
-import org.apache.tools.ant.filters.FixCrLfFilter
 
 import java.nio.file.Files
 import java.nio.file.Path
-import java.util.regex.Matcher
-import java.util.regex.Pattern
 
 /*****************************************************************************
  *                  Third party dependencies report                          *
 *****************************************************************************/
@@ -219,64 +217,6 @@ xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule ->
   copyLog4jProperties(buildDefaultLog4jConfig, xpackModule)
 }
 
-/*****************************************************************************
- *                                  JDKs                                     *
- *****************************************************************************/
-
-// extract the bundled jdk version, broken into elements as: [feature, interim, update, build]
-// Note the "patch" version is not yet handled here, as it has not yet been used by java.
-Pattern JDK_VERSION = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+)@([a-f0-9]{32})?")
-Matcher jdkVersionMatcher = JDK_VERSION.matcher(VersionProperties.bundledJdk)
-if (jdkVersionMatcher.matches() == false) {
-  throw new IllegalArgumentException("Malformed jdk version [" + VersionProperties.bundledJdk + "]")
-}
-String jdkVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? (jdkVersionMatcher.group(2)) : "")
-String jdkMajor = jdkVersionMatcher.group(1)
-String jdkBuild = jdkVersionMatcher.group(3)
-String hash = jdkVersionMatcher.group(4)
-
-repositories {
-  // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back
-  ivy {
-    url "https://download.oracle.com"
-    patternLayout {
-      artifact "java/GA/jdk${jdkMajor}/${jdkBuild}/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"
-    }
-  }
-  // current pattern since 12.0.1
-  ivy {
-    url "https://download.oracle.com"
-    patternLayout {
-      artifact "java/GA/jdk${jdkVersion}/${hash}/${jdkBuild}/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"
-    }
-  }
-}
-for (String platform : ['linux', 'darwin', 'windows']) {
-  String jdkConfigName = "jdk_${platform}"
-  Configuration jdkConfig = configurations.create(jdkConfigName)
-  String extension = platform.equals('windows') ? 'zip' : 'tar.gz'
-  dependencies.add(jdkConfigName, "jdk:${platform.equals('darwin') ? 'osx' : platform}:${jdkVersion}@${extension}")
-
-  int rootNdx = platform.equals('darwin') ? 2 : 1
-  Closure removeRootDir = {
-    it.eachFile { FileCopyDetails details ->
-      details.relativePath = new RelativePath(true, details.relativePath.segments[rootNdx..-1] as String[])
-    }
-    it.includeEmptyDirs false
-  }
-  String extractDir = "${buildDir}/jdks/openjdk-${jdkVersion}_${platform}"
-  project.task("extract${platform.capitalize()}Jdk", type: Copy) {
-    doFirst {
-      project.delete(extractDir)
-    }
-    into extractDir
-    if (extension.equals('zip')) {
-      from({ zipTree(jdkConfig.singleFile) }, removeRootDir)
-    } else {
-      from({ tarTree(resources.gzip(jdkConfig.singleFile)) }, removeRootDir)
-    }
-  }
-}
-
 // make sure we have a clean task since we aren't a java project, but we have tasks that
 // put stuff in the build dir
 task clean(type: Delete) {
@@ -284,6 +224,9 @@ task clean(type: Delete) {
 }
 
 configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
+
+  apply plugin: 'elasticsearch.jdk-download'
+
   // TODO: the map needs to be an input of the tasks, so that when it changes, the task will re-run...
   /*****************************************************************************
    *             Properties to expand when copying packaging files            *
@@ -422,9 +365,15 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
     }
   }
 
-  jdkFiles = { platform ->
-    copySpec {
-      from project(':distribution').tasks.getByName("extract${platform.capitalize()}Jdk")
+  jdkFiles = { project, platform ->
+    project.jdks {
+      "bundled_${platform}" {
+        it.platform = platform
+        it.version = VersionProperties.bundledJdk
+      }
+    }
+    return copySpec {
+      from project.jdks."bundled_${platform}"
       eachFile { FileCopyDetails details ->
         if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
          details.mode = 0755

View File

@@ -17,34 +17,17 @@ dependencies {
   ossDockerSource project(path: ":distribution:archives:oss-linux-tar")
 }
 
-ext.expansions = { oss ->
+ext.expansions = { oss, local ->
   final String classifier = 'linux-x86_64'
   final String elasticsearch = oss ? "elasticsearch-oss-${VersionProperties.elasticsearch}-${classifier}.tar.gz" : "elasticsearch-${VersionProperties.elasticsearch}-${classifier}.tar.gz"
   return [
     'elasticsearch'       : elasticsearch,
     'license'             : oss ? 'Apache-2.0' : 'Elastic License',
-    'source_elasticsearch': local() ? "COPY $elasticsearch /opt/" : "RUN cd /opt && curl --retry 8 -s -L -O https://artifacts.elastic.co/downloads/elasticsearch/${elasticsearch} && cd -",
+    'source_elasticsearch': local ? "COPY $elasticsearch /opt/" : "RUN cd /opt && curl --retry 8 -s -L -O https://artifacts.elastic.co/downloads/elasticsearch/${elasticsearch} && cd -",
     'version'             : VersionProperties.elasticsearch
   ]
 }
 
-/*
- * We need to be able to render a Dockerfile that references the official artifacts on https://artifacts.elastic.co. For this, we use a
- * substitution in the Dockerfile template where we can either replace source_elasticsearch with a COPY from the Docker build context, or
- * a RUN curl command to retrieve the artifact from https://artifacts.elastic.co. The system property build.docker.source, which can be
- * either "local" (default) or "remote" controls which version of the Dockerfile is produced.
- */
-private static boolean local() {
-  final String buildDockerSource = System.getProperty("build.docker.source")
-  if (buildDockerSource == null || "local".equals(buildDockerSource)) {
-    return true
-  } else if ("remote".equals(buildDockerSource)) {
-    return false
-  } else {
-    throw new IllegalArgumentException("expected build.docker.source to be [local] or [remote] but was [" + buildDockerSource + "]")
-  }
-}
-
 private static String files(final boolean oss) {
   return "build/${ oss ? 'oss-' : ''}docker"
 }
@@ -53,19 +36,31 @@ private static String taskName(final String prefix, final boolean oss, final Str
   return "${prefix}${oss ? 'Oss' : ''}${suffix}"
 }
 
-void addCopyDockerContextTask(final boolean oss) {
-  task(taskName("copy", oss, "DockerContext"), type: Sync) {
-    into files(oss)
+project.ext {
+  dockerBuildContext = { boolean oss, boolean local ->
+    copySpec {
       into('bin') {
-        from 'src/docker/bin'
+        from project.projectDir.toPath().resolve("src/docker/bin")
       }
       into('config') {
-        from 'src/docker/config'
+        from project.projectDir.toPath().resolve("src/docker/config")
      }
-    if (local()) {
+      from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) {
+        MavenFilteringHack.filter(it, expansions(oss, local))
+      }
+    }
+  }
+}
+
+void addCopyDockerContextTask(final boolean oss) {
+  task(taskName("copy", oss, "DockerContext"), type: Sync) {
+    inputs.properties(expansions(oss, true))
+    into files(oss)
+    with dockerBuildContext(oss, true)
+
     if (oss) {
       from configurations.ossDockerSource
     } else {
@@ -74,19 +69,6 @@ void addCopyDockerContextTask(final boolean oss) {
     from configurations.dockerPlugins
   }
-  }
-}
-
-void addCopyDockerfileTask(final boolean oss) {
-  task(taskName("copy", oss, "Dockerfile"), type: Copy) {
-    dependsOn taskName("copy", oss, "DockerContext")
-    inputs.properties(expansions(oss)) // ensure task is run when ext.expansions is changed
-    into files(oss)
-    from('src/docker/Dockerfile') {
-      MavenFilteringHack.filter(it, expansions(oss))
-    }
-  }
 }
 
 preProcessFixture {
@@ -104,7 +86,6 @@ check.dependsOn postProcessFixture
 void addBuildDockerImage(final boolean oss) {
   final Task buildDockerImageTask = task(taskName("build", oss, "DockerImage"), type: LoggedExec) {
     dependsOn taskName("copy", oss, "DockerContext")
-    dependsOn taskName("copy", oss, "Dockerfile")
     List<String> tags
     if (oss) {
       tags = [
@@ -132,7 +113,6 @@ void addBuildDockerImage(final boolean oss) {
 for (final boolean oss : [false, true]) {
   addCopyDockerContextTask(oss)
-  addCopyDockerfileTask(oss)
   addBuildDockerImage(oss)
 }

View File

@@ -0,0 +1,11 @@
apply plugin: 'base'
task buildDockerBuildContext(type: Tar) {
extension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch"
with dockerBuildContext(false, false)
}
assemble.dependsOn buildDockerBuildContext

View File

@@ -0,0 +1,11 @@
apply plugin: 'base'
task buildOssDockerBuildContext(type: Tar) {
extension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-oss"
with dockerBuildContext(true, false)
}
assemble.dependsOn buildOssDockerBuildContext

View File

@@ -53,6 +53,7 @@ import java.util.regex.Pattern
 
 buildscript {
   repositories {
     maven {
+      name "gradle-plugins"
       url "https://plugins.gradle.org/m2/"
     }
   }
@@ -142,7 +143,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) {
     }
     if (jdk) {
       into('jdk') {
-        with jdkFiles('linux')
+        with jdkFiles(project, 'linux')
      }
    }
    // we need to specify every intermediate directory in these paths so the package managers know they are explicitly

View File

@@ -94,11 +94,15 @@ elif [ "$RESTART_ON_UPGRADE" = "true" ]; then
 fi
 
 # the equivalent code for rpm is in posttrans
-if [ "$PACKAGE" = "deb" -a ! -f /etc/elasticsearch/elasticsearch.keystore ]; then
+if [ "$PACKAGE" = "deb" ]; then
+    if [ ! -f /etc/elasticsearch/elasticsearch.keystore ]; then
         /usr/share/elasticsearch/bin/elasticsearch-keystore create
         chown root:elasticsearch /etc/elasticsearch/elasticsearch.keystore
         chmod 660 /etc/elasticsearch/elasticsearch.keystore
         md5sum /etc/elasticsearch/elasticsearch.keystore > /etc/elasticsearch/.elasticsearch.keystore.initial_md5sum
+    else
+        /usr/share/elasticsearch/bin/elasticsearch-keystore upgrade
+    fi
 fi
 
 ${scripts.footer}

View File

@@ -3,6 +3,8 @@ if [ ! -f /etc/elasticsearch/elasticsearch.keystore ]; then
     chown root:elasticsearch /etc/elasticsearch/elasticsearch.keystore
     chmod 660 /etc/elasticsearch/elasticsearch.keystore
     md5sum /etc/elasticsearch/elasticsearch.keystore > /etc/elasticsearch/.elasticsearch.keystore.initial_md5sum
+else
+    /usr/share/elasticsearch/bin/elasticsearch-keystore upgrade
 fi
 
 ${scripts.footer}

View File

@@ -56,9 +56,6 @@ integTestCluster {
   extraConfigFile 'hunspell/en_US/en_US.dic', '../server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.dic'
   // Whitelist reindexing from the local node so we can test it.
   setting 'reindex.remote.whitelist', '127.0.0.1:*'
-  // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults
-  systemProperty 'es.scripting.update.ctx_in_params', 'false'
 }
 
 // build the cluster with all plugins

View File

@@ -57,7 +57,7 @@ For Maven:
 <repository>
   <id>elastic-lucene-snapshots</id>
   <name>Elastic Lucene Snapshots</name>
-  <url>http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/00142c9</url>
+  <url>https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/00142c9</url>
   <releases><enabled>true</enabled></releases>
   <snapshots><enabled>false</enabled></snapshots>
 </repository>
@@ -68,7 +68,8 @@ For Gradle:
 ["source","groovy",subs="attributes"]
 --------------------------------------------------
 maven {
-    url 'http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/00142c9'
+    name "lucene-snapshots"
+    url 'https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/00142c9'
 }
 --------------------------------------------------

@@ -13,7 +13,7 @@ The API accepts a +{request}+ object and returns a +{response}+.
 ==== Get Data Frame Request
 A +{request}+ requires either a data frame transform id, a comma separated list of ids or
-the special wildcard `_all` to get all {dataframe-transform}s
+the special wildcard `_all` to get all {dataframe-transforms}
 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
@@ -29,8 +29,10 @@ The following arguments are optional.
 --------------------------------------------------
 include-tagged::{doc-tests-file}[{api}-request-options]
 --------------------------------------------------
-<1> Page {dataframe-transform}s starting from this value
-<2> Return at most `size` {dataframe-transform}s
+<1> The page parameters `from` and `size`. `from` specifies the number of
+{dataframe-transforms} to skip. `size` specifies the maximum number of
+{dataframe-transforms} to get. Defaults to `0` and `100` respectively.
 include::../execution.asciidoc[]
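For illustration, the paging behaviour described above maps onto a REST call along these lines; the `_all` wildcard comes from the text above, while passing `from` and `size` as query parameters (and the values shown) is an assumption, not something stated here:
[source,js]
--------------------------------------------------
GET _data_frame/transforms/_all?from=0&size=100
--------------------------------------------------
// NOTCONSOLE (hypothetical sketch: the query-parameter form of `from`/`size` is assumed)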

@@ -93,7 +93,7 @@ For Maven:
 <repository>
   <id>elastic-lucene-snapshots</id>
   <name>Elastic Lucene Snapshots</name>
-  <url>http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/83f9835</url>
+  <url>https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/83f9835</url>
   <releases><enabled>true</enabled></releases>
   <snapshots><enabled>false</enabled></snapshots>
 </repository>
@@ -104,7 +104,8 @@ For Gradle:
 ["source","groovy",subs="attributes"]
 --------------------------------------------------
 maven {
-  url 'http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/83f9835'
+  name 'lucene-snapshots'
+  url 'https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/83f9835'
 }
 --------------------------------------------------

@@ -10,7 +10,7 @@
 |Aggs Reduce | <<painless-api-reference-shared, Shared API>> |
 |Analysis | <<painless-api-reference-shared, Shared API>> | <<painless-api-reference-analysis, Specialized API>>
 |Bucket Aggregation | <<painless-api-reference-shared, Shared API>> |
-|Field | <<painless-api-reference-shared, Shared API>> |
+|Field | <<painless-api-reference-shared, Shared API>> | <<painless-api-reference-field, Specialized API>>
 |Filter | <<painless-api-reference-shared, Shared API>> |
 |Ingest | <<painless-api-reference-shared, Shared API>> | <<painless-api-reference-ingest, Specialized API>>
 |Interval | <<painless-api-reference-shared, Shared API>> |
@@ -33,6 +33,7 @@
 include::painless-api-reference-shared/index.asciidoc[]
 include::painless-api-reference-analysis/index.asciidoc[]
+include::painless-api-reference-field/index.asciidoc[]
 include::painless-api-reference-ingest/index.asciidoc[]
 include::painless-api-reference-moving-function/index.asciidoc[]
 include::painless-api-reference-score/index.asciidoc[]

@@ -7,6 +7,10 @@ The following specialized API is available in the Analysis context.
 * See the <<painless-api-reference-shared, Shared API>> for further API available in all contexts.
+==== Classes By Package
+The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields.
 ==== org.elasticsearch.analysis.common
 <<painless-api-reference-analysis-org-elasticsearch-analysis-common, Expand details for org.elasticsearch.analysis.common>>

@@ -3,7 +3,7 @@
 [role="exclude",id="painless-api-reference-analysis-org-elasticsearch-analysis-common"]
 === Analysis API for package org.elasticsearch.analysis.common
-See the <<painless-api-reference-analysis, Analysis API>> for a high-level overview of all packages.
+See the <<painless-api-reference-analysis, Analysis API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-analysis-AnalysisPredicateScript-Token]]
 ==== AnalysisPredicateScript.Token

@@ -0,0 +1,17 @@
+// This file is auto-generated. Do not edit.
+[[painless-api-reference-field]]
+=== Field API
+The following specialized API is available in the Field context.
+* See the <<painless-api-reference-shared, Shared API>> for further API available in all contexts.
+==== Static Methods
+The following methods are directly callable without a class/instance qualifier. Note parameters denoted by a (*) are treated as read-only values.
+* List domainSplit(String)
+* List domainSplit(String, Map)
+include::packages.asciidoc[]
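As a rough sketch of how the `domainSplit` methods above might be called from the Field context, a script field could invoke them as follows; the index name, the `domain` field, and the use of `script_fields` here are assumptions for illustration:
[source,js]
--------------------------------------------------
GET my_index/_search
{
  "script_fields": {
    "domain_parts": {
      "script": {
        "lang": "painless",
        "source": "domainSplit(doc['domain'].value)"
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE (hypothetical sketch; the two-argument variant additionally takes a params Map)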

@@ -0,0 +1,3 @@
+// This file is auto-generated. Do not edit.

@@ -7,6 +7,10 @@ The following specialized API is available in the Ingest context.
 * See the <<painless-api-reference-shared, Shared API>> for further API available in all contexts.
+==== Classes By Package
+The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields.
 ==== org.elasticsearch.ingest.common
 <<painless-api-reference-ingest-org-elasticsearch-ingest-common, Expand details for org.elasticsearch.ingest.common>>

@@ -3,7 +3,7 @@
 [role="exclude",id="painless-api-reference-ingest-org-elasticsearch-ingest-common"]
 === Ingest API for package org.elasticsearch.ingest.common
-See the <<painless-api-reference-ingest, Ingest API>> for a high-level overview of all packages.
+See the <<painless-api-reference-ingest, Ingest API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-ingest-Processors]]
 ==== Processors

@@ -7,6 +7,10 @@ The following specialized API is available in the Moving Function context.
 * See the <<painless-api-reference-shared, Shared API>> for further API available in all contexts.
+==== Classes By Package
+The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields.
 ==== org.elasticsearch.search.aggregations.pipeline
 <<painless-api-reference-moving-function-org-elasticsearch-search-aggregations-pipeline, Expand details for org.elasticsearch.search.aggregations.pipeline>>

@@ -3,7 +3,7 @@
 [role="exclude",id="painless-api-reference-moving-function-org-elasticsearch-search-aggregations-pipeline"]
 === Moving Function API for package org.elasticsearch.search.aggregations.pipeline
-See the <<painless-api-reference-moving-function, Moving Function API>> for a high-level overview of all packages.
+See the <<painless-api-reference-moving-function, Moving Function API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-moving-function-MovingFunctions]]
 ==== MovingFunctions

@@ -7,6 +7,31 @@ The following specialized API is available in the Score context.
 * See the <<painless-api-reference-shared, Shared API>> for further API available in all contexts.
+==== Static Methods
+The following methods are directly callable without a class/instance qualifier. Note parameters denoted by a (*) are treated as read-only values.
+* double cosineSimilarity(List *, VectorScriptDocValues.DenseVectorScriptDocValues)
+* double cosineSimilaritySparse(Map *, VectorScriptDocValues.SparseVectorScriptDocValues)
+* double decayDateExp(String *, String *, String *, double *, JodaCompatibleZonedDateTime)
+* double decayDateGauss(String *, String *, String *, double *, JodaCompatibleZonedDateTime)
+* double decayDateLinear(String *, String *, String *, double *, JodaCompatibleZonedDateTime)
+* double decayGeoExp(String *, String *, String *, double *, GeoPoint)
+* double decayGeoGauss(String *, String *, String *, double *, GeoPoint)
+* double decayGeoLinear(String *, String *, String *, double *, GeoPoint)
+* double decayNumericExp(double *, double *, double *, double *, double)
+* double decayNumericGauss(double *, double *, double *, double *, double)
+* double decayNumericLinear(double *, double *, double *, double *, double)
+* double dotProduct(List, VectorScriptDocValues.DenseVectorScriptDocValues)
+* double dotProductSparse(Map *, VectorScriptDocValues.SparseVectorScriptDocValues)
+* double randomScore(int *)
+* double randomScore(int *, String *)
+* double saturation(double, double)
+* double sigmoid(double, double, double)
+==== Classes By Package
+The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields.
 ==== org.elasticsearch.index.query
 <<painless-api-reference-score-org-elasticsearch-index-query, Expand details for org.elasticsearch.index.query>>
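For orientation, the static methods above are callable directly from a score script; a minimal `script_score` sketch using `saturation` might look like this, with the index and the `likes` field invented for illustration:
[source,js]
--------------------------------------------------
GET my_index/_search
{
  "query": {
    "script_score": {
      "query": { "match_all": {} },
      "script": {
        "source": "saturation(doc['likes'].value, 10)"
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE (hypothetical sketch; index and field names are illustrative)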

@@ -3,7 +3,7 @@
 [role="exclude",id="painless-api-reference-score-org-elasticsearch-index-query"]
 === Score API for package org.elasticsearch.index.query
-See the <<painless-api-reference-score, Score API>> for a high-level overview of all packages.
+See the <<painless-api-reference-score, Score API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-score-VectorScriptDocValues]]
 ==== VectorScriptDocValues

@@ -5,6 +5,10 @@
 The following API is available in all contexts.
+==== Classes By Package
+The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields.
 ==== java.lang
 <<painless-api-reference-shared-java-lang, Expand details for java.lang>>

@@ -3,7 +3,7 @@
 [role="exclude",id="painless-api-reference-shared-java-lang"]
 === Shared API for package java.lang
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-Appendable]]
 ==== Appendable
@@ -1399,7 +1399,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-math"]
 === Shared API for package java.math
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-BigDecimal]]
 ==== BigDecimal
@@ -1557,7 +1557,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-text"]
 === Shared API for package java.text
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-Annotation]]
 ==== Annotation
@@ -2265,7 +2265,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-time"]
 === Shared API for package java.time
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-Clock]]
 ==== Clock
@@ -3078,7 +3078,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-time-chrono"]
 === Shared API for package java.time.chrono
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-AbstractChronology]]
 ==== AbstractChronology
@@ -3675,7 +3675,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-time-format"]
 === Shared API for package java.time.format
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-DateTimeFormatter]]
 ==== DateTimeFormatter
@@ -3874,7 +3874,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-time-temporal"]
 === Shared API for package java.time.temporal
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-ChronoField]]
 ==== ChronoField
@@ -4166,7 +4166,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-time-zone"]
 === Shared API for package java.time.zone
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-ZoneOffsetTransition]]
 ==== ZoneOffsetTransition
@@ -4265,7 +4265,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-util"]
 === Shared API for package java.util
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-AbstractCollection]]
 ==== AbstractCollection
@@ -7194,7 +7194,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-util-function"]
 === Shared API for package java.util.function
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-BiConsumer]]
 ==== BiConsumer
@@ -7582,7 +7582,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-util-regex"]
 === Shared API for package java.util.regex
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-Matcher]]
 ==== Matcher
@@ -7635,7 +7635,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-java-util-stream"]
 === Shared API for package java.util.stream
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-BaseStream]]
 ==== BaseStream
@@ -7957,7 +7957,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-org-apache-lucene-util"]
 === Shared API for package org.apache.lucene.util
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-BytesRef]]
 ==== BytesRef
@@ -7974,7 +7974,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-org-elasticsearch-common-geo"]
 === Shared API for package org.elasticsearch.common.geo
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-GeoPoint]]
 ==== GeoPoint
@@ -7987,7 +7987,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-org-elasticsearch-index-fielddata"]
 === Shared API for package org.elasticsearch.index.fielddata
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-ScriptDocValues-Booleans]]
 ==== ScriptDocValues.Booleans
@@ -8386,7 +8386,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-org-elasticsearch-index-mapper"]
 === Shared API for package org.elasticsearch.index.mapper
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-IpFieldMapper-IpFieldType-IpScriptDocValues]]
 ==== IpFieldMapper.IpFieldType.IpScriptDocValues
@@ -8445,7 +8445,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-org-elasticsearch-index-query"]
 === Shared API for package org.elasticsearch.index.query
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-IntervalFilterScript-Interval]]
 ==== IntervalFilterScript.Interval
@@ -8459,7 +8459,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-org-elasticsearch-index-similarity"]
 === Shared API for package org.elasticsearch.index.similarity
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-ScriptedSimilarity-Doc]]
 ==== ScriptedSimilarity.Doc
@@ -8499,7 +8499,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-org-elasticsearch-painless-api"]
 === Shared API for package org.elasticsearch.painless.api
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-Debug]]
 ==== Debug
@@ -8511,7 +8511,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-org-elasticsearch-script"]
 === Shared API for package org.elasticsearch.script
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-JodaCompatibleZonedDateTime]]
 ==== JodaCompatibleZonedDateTime
@@ -8594,7 +8594,7 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
 [role="exclude",id="painless-api-reference-shared-org-elasticsearch-search-lookup"]
 === Shared API for package org.elasticsearch.search.lookup
-See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages.
+See the <<painless-api-reference-shared, Shared API>> for a high-level overview of all packages and classes.
 [[painless-api-reference-shared-FieldLookup]]
 ==== FieldLookup

@@ -116,9 +116,8 @@ And it'd respond:
 duplicate of this token it has been removed from the token stream
 NOTE: The synonym and synonym_graph filters use their preceding analysis chain to
-parse and analyse their synonym lists, and ignore any token filters in the chain
-that produce multiple tokens at the same position. This means that any filters
-within the multiplexer will be ignored for the purpose of synonyms. If you want to
-use filters contained within the multiplexer for parsing synonyms (for example, to
-apply stemming to the synonym lists), then you should append the synonym filter
-to the relevant multiplexer filter list.
+parse and analyse their synonym lists, and will throw an exception if that chain
+contains token filters that produce multiple tokens at the same position.
+If you want to apply synonyms to a token stream containing a multiplexer, then you
+should append the synonym filter to each relevant multiplexer filter list, rather than
+placing it after the multiplexer in the main token chain definition.
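A minimal settings sketch of the layout recommended above, with the synonym filter appended inside each multiplexer branch rather than placed after the multiplexer; all filter names and the synonym rule are invented for illustration:
[source,js]
--------------------------------------------------
PUT /my_index
{
  "settings": {
    "analysis": {
      "filter": {
        "my_synonyms": {
          "type": "synonym",
          "synonyms": [ "car, automobile" ]
        },
        "my_multiplexer": {
          "type": "multiplexer",
          "filters": [ "lowercase, my_synonyms", "porter_stem, my_synonyms" ]
        }
      },
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "standard",
          "filter": [ "my_multiplexer" ]
        }
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE (hypothetical sketch; each branch parses the synonym list with its own preceding filters)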

@@ -188,6 +188,10 @@ parsing synonyms, e.g. `asciifolding` will only produce the folded version of the
 token. Others, e.g. `multiplexer`, `word_delimiter_graph` or `ngram` will throw an
 error.
+If you need to build analyzers that include both multi-token filters and synonym
+filters, consider using the <<analysis-multiplexer-tokenfilter,multiplexer>> filter,
+with the multi-token filters in one branch and the synonym filter in the other.
 WARNING: The synonym rules should not contain words that are removed by
 a filter that appears after in the chain (a `stop` filter for instance).
 Removing a term from a synonym rule breaks the matching at query time.

@@ -177,3 +177,7 @@ multiple versions of a token may choose which version of the token to emit when
 parsing synonyms, e.g. `asciifolding` will only produce the folded version of the
 token. Others, e.g. `multiplexer`, `word_delimiter_graph` or `ngram` will throw an
 error.
+If you need to build analyzers that include both multi-token filters and synonym
+filters, consider using the <<analysis-multiplexer-tokenfilter,multiplexer>> filter,
+with the multi-token filters in one branch and the synonym filter in the other.

@@ -36,7 +36,9 @@ eCommerce sample data:
 --------------------------------------------------
 POST _data_frame/transforms/_preview
 {
-  "source": "kibana_sample_data_ecommerce",
+  "source": {
+    "index": "kibana_sample_data_ecommerce"
+  },
   "pivot": {
     "group_by": {
       "customer_id": {

@@ -287,4 +287,4 @@ See <<url-access-control>>.
 [float]
 [[bulk-partial-responses]]
 === Partial responses
-To ensure fast responses, the multi search API will respond with partial results if one or more shards fail. See <<shard-failures, Shard failures>> for more information.
+To ensure fast responses, the bulk API will respond with partial results if one or more shards fail. See <<shard-failures, Shard failures>> for more information.
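Since a bulk request can succeed for some items and fail for others, clients should inspect each item's `status` (and `error`, if present) in the response rather than treating the request as all-or-nothing. A minimal request shape, with the index and documents invented for illustration:
[source,js]
--------------------------------------------------
POST _bulk
{ "index": { "_index": "test", "_id": "1" } }
{ "field1": "value1" }
{ "index": { "_index": "test", "_id": "2" } }
{ "field1": "value2" }
--------------------------------------------------
// NOTCONSOLE (hypothetical sketch; check `items[*].index.status` per document in the response)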

@@ -198,14 +198,37 @@ PUT my_index
 PUT my_index/_doc/1
 {
   "name": {
-    "first": "Alice",
-    "middle": "Mary",
-    "last": "White"
+    "first": "John",
+    "middle": "Winston",
+    "last": "Lennon"
   }
 }
 --------------------------------------------------
 // CONSOLE
+Note that the `path_match` and `path_unmatch` parameters match on object paths
+in addition to leaf fields. As an example, indexing the following document will
+result in an error because the `path_match` setting also matches the object
+field `name.title`, which can't be mapped as text:
+[source,js]
+--------------------------------------------------
+PUT my_index/_doc/2
+{
+  "name": {
+    "first": "Paul",
+    "last": "McCartney",
+    "title": {
+      "value": "Sir",
+      "category": "order of chivalry"
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+// TEST[catch:bad_request]
 [[template-variables]]
 ==== `{name}` and `{dynamic_type}`

@@ -270,7 +270,8 @@ Elasticsearch 7.x::
 * Specifying types in requests is deprecated. For instance, indexing a
   document no longer requires a document `type`. The new index APIs
   are `PUT {index}/_doc/{id}` in case of explicit ids and `POST {index}/_doc`
-  for auto-generated ids.
+  for auto-generated ids. Note that in 7.0, `_doc` is a permanent part of the
+  path, and represents the endpoint name rather than the document type.
 * The `include_type_name` parameter in the index creation, index template,
   and mapping APIs will default to `false`. Setting the parameter at all will
@@ -554,6 +555,10 @@ GET index/_doc/1
 // CONSOLE
 // TEST[continued]
+NOTE: In 7.0, `_doc` represents the endpoint name instead of the document type.
+The `_doc` component is a permanent part of the path for the document `index`,
+`get`, and `delete` APIs going forward, and will not be removed in 8.0.
 For API paths that contain both a type and endpoint name like `_update`,
 in 7.0 the endpoint will immediately follow the index name:
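A short sketch of the resulting typeless 7.0 paths; the index name, document id, and request bodies are invented for illustration:
[source,js]
--------------------------------------------------
PUT index/_doc/1
{ "foo": "bar" }

POST index/_update/1
{
  "doc": { "foo": "baz" }
}
--------------------------------------------------
// NOTCONSOLE (hypothetical sketch; `_doc` and `_update` are endpoint names, not document types)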

@@ -11,5 +11,6 @@ For information about how to upgrade your cluster, see <<setup-upgrade>>.
 --
+include::migrate_7_2.asciidoc[]
 include::migrate_7_1.asciidoc[]
 include::migrate_7_0.asciidoc[]

Some files were not shown because too many files have changed in this diff.