Merge branch 'master' into index-lifecycle
commit 13a0d822d0
@@ -17,12 +17,22 @@
 * under the License.
 */
import java.nio.file.Files
import org.gradle.util.GradleVersion

plugins {
  id 'java-gradle-plugin'
  id 'groovy'
}

gradlePlugin {
  plugins {
    simplePlugin {
      id = 'elasticsearch.clusterformation'
      implementationClass = 'org.elasticsearch.gradle.clusterformation.ClusterformationPlugin'
    }
  }
}

group = 'org.elasticsearch.gradle'

String minimumGradleVersion = file('src/main/resources/minimumGradleVersion').text.trim()

@@ -166,7 +176,6 @@ if (project != rootProject) {
    it.tasks.matching { it.name == 'publishNebulaPublicationToLocalTestRepository'}
  }
  exclude "**/*Tests.class"
  include "**/*IT.class"
  testClassesDirs = sourceSets.test.output.classesDirs
  classpath = sourceSets.test.runtimeClasspath
  inputs.dir(file("src/testKit"))

@@ -22,8 +22,8 @@ package org.elasticsearch.gradle.precommit
import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import org.elasticsearch.gradle.LoggedExec
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.Classpath
import org.gradle.api.tasks.OutputFile

/**
 * Runs CheckJarHell on a classpath.
 */

@@ -35,9 +35,13 @@ public class JarHellTask extends LoggedExec {
     * inputs (ie the jars/class files).
     */
    @OutputFile
    File successMarker = new File(project.buildDir, 'markers/jarHell')
    File successMarker

    @Classpath
    FileCollection classpath

    public JarHellTask() {
        successMarker = new File(project.buildDir, 'markers/jarHell-' + getName())
        project.afterEvaluate {
            FileCollection classpath = project.sourceSets.test.runtimeClasspath
            if (project.plugins.hasPlugin(ShadowPlugin)) {

@@ -31,7 +31,7 @@ class PrecommitTasks {

    /** Adds a precommit task, which depends on non-test verification tasks. */
    public static Task create(Project project, boolean includeDependencyLicenses) {
        Configuration forbiddenApisConfiguration = project.configurations.create("forbiddenApisCliJar")
        project.configurations.create("forbiddenApisCliJar")
        project.dependencies {
            forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.5')
        }

@@ -43,7 +43,7 @@ class PrecommitTasks {
            project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class),
            project.tasks.create('licenseHeaders', LicenseHeadersTask.class),
            project.tasks.create('filepermissions', FilePermissionsTask.class),
            project.tasks.create('jarHell', JarHellTask.class),
            configureJarHell(project),
            configureThirdPartyAudit(project)
        ]

@@ -80,6 +80,12 @@ class PrecommitTasks {
        return project.tasks.create(precommitOptions)
    }

    private static Task configureJarHell(Project project) {
        Task task = project.tasks.create('jarHell', JarHellTask.class)
        task.classpath = project.sourceSets.test.runtimeClasspath
        return task
    }

    private static Task configureThirdPartyAudit(Project project) {
        ThirdPartyAuditTask thirdPartyAuditTask = project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)
        ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')

@@ -0,0 +1,68 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch;

import org.gradle.api.Action;
import org.gradle.api.Project;
import org.gradle.api.file.CopySpec;
import org.gradle.api.file.FileTree;
import org.gradle.api.tasks.WorkResult;
import org.gradle.process.ExecResult;
import org.gradle.process.JavaExecSpec;

import java.io.File;

/**
 * Facilitate access to Gradle services without a direct dependency on Project.
 *
 * In a future release Gradle will offer service injection; this adapter plays that role until that time.
 * It exposes the service methods that are part of the public API, as the classes implementing them are not.
 * Today service injection is <a href="https://github.com/gradle/gradle/issues/2363">not available</a> for
 * extensions.
 *
 * Everything exposed here must be thread safe. That is the very reason why project is not passed in directly.
 */
public class GradleServicesAdapter {

    public final Project project;

    public GradleServicesAdapter(Project project) {
        this.project = project;
    }

    public static GradleServicesAdapter getInstance(Project project) {
        return new GradleServicesAdapter(project);
    }

    public WorkResult copy(Action<? super CopySpec> action) {
        return project.copy(action);
    }

    public WorkResult sync(Action<? super CopySpec> action) {
        return project.sync(action);
    }

    public ExecResult javaexec(Action<? super JavaExecSpec> action) {
        return project.javaexec(action);
    }

    public FileTree zipTree(File zipPath) {
        return project.zipTree(zipPath);
    }
}
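
To illustrate the intent, here is a minimal sketch (not part of this commit; the `DistroExtractor` class and its paths are hypothetical) of a collaborator that depends only on the adapter, never on `Project` itself:

[source,java]
----
import org.elasticsearch.GradleServicesAdapter;
import org.gradle.api.file.FileTree;

import java.io.File;

// Hypothetical consumer: holding only the adapter keeps the class decoupled
// from Project, so it can be handed to extensions safely.
public class DistroExtractor {

    private final GradleServicesAdapter services;

    public DistroExtractor(GradleServicesAdapter services) {
        this.services = services;
    }

    public void extract(File distroZip, File targetDir) {
        // zipTree and sync are part of Gradle's public API, surfaced via the adapter
        FileTree zipContents = services.zipTree(distroZip);
        services.sync(spec -> {
            spec.from(zipContents);
            spec.into(targetDir);
        });
    }
}
----
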
@@ -0,0 +1,36 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

public enum Distribution {

    INTEG_TEST("integ-test-zip"),
    ZIP("zip"),
    ZIP_OSS("zip-oss");

    private final String name;

    Distribution(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }
}

@@ -0,0 +1,110 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.clusterformation;

import groovy.lang.Closure;
import org.elasticsearch.GradleServicesAdapter;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.execution.TaskActionListener;
import org.gradle.api.execution.TaskExecutionListener;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.tasks.TaskState;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ClusterformationPlugin implements Plugin<Project> {

    public static final String LIST_TASK_NAME = "listElasticSearchClusters";
    public static final String EXTENSION_NAME = "elasticSearchClusters";

    private final Logger logger = Logging.getLogger(ClusterformationPlugin.class);

    @Override
    public void apply(Project project) {
        NamedDomainObjectContainer<? extends ElasticsearchConfiguration> container = project.container(
            ElasticsearchNode.class,
            (name) -> new ElasticsearchNode(name, GradleServicesAdapter.getInstance(project))
        );
        project.getExtensions().add(EXTENSION_NAME, container);

        Task listTask = project.getTasks().create(LIST_TASK_NAME);
        listTask.setGroup("ES cluster formation");
        listTask.setDescription("Lists all ES clusters configured for this project");
        listTask.doLast((Task task) ->
            container.forEach((ElasticsearchConfiguration cluster) ->
                logger.lifecycle(" * {}: {}", cluster.getName(), cluster.getDistribution())
            )
        );

        Map<Task, List<ElasticsearchConfiguration>> taskToCluster = new HashMap<>();

        // register an extension for all current and future tasks, so that any task can declare that it wants to use a
        // specific cluster.
        project.getTasks().all((Task task) ->
            task.getExtensions().findByType(ExtraPropertiesExtension.class)
                .set(
                    "useCluster",
                    new Closure<Void>(this, this) {
                        public void doCall(ElasticsearchConfiguration conf) {
                            taskToCluster.computeIfAbsent(task, k -> new ArrayList<>()).add(conf);
                        }
                    })
        );

        project.getGradle().getTaskGraph().whenReady(taskExecutionGraph ->
            taskExecutionGraph.getAllTasks()
                .forEach(task ->
                    taskToCluster.getOrDefault(task, Collections.emptyList()).forEach(ElasticsearchConfiguration::claim)
                )
        );
        project.getGradle().addListener(
            new TaskActionListener() {
                @Override
                public void beforeActions(Task task) {
                    // we only start the cluster before the actions, so we'll not start it if the task is up-to-date
                    taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchConfiguration::start);
                }
                @Override
                public void afterActions(Task task) {}
            }
        );
        project.getGradle().addListener(
            new TaskExecutionListener() {
                @Override
                public void afterExecute(Task task, TaskState state) {
                    // always un-claim the cluster, even if _this_ task is up-to-date, as others might not have been and caused the
                    // cluster to start.
                    taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchConfiguration::unClaimAndStop);
                }
                @Override
                public void beforeExecute(Task task) {}
            }
        );
    }

}

@@ -0,0 +1,46 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.clusterformation;

import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.Version;

import java.util.concurrent.Future;

public interface ElasticsearchConfiguration {
    String getName();

    Version getVersion();

    void setVersion(Version version);

    default void setVersion(String version) {
        setVersion(Version.fromString(version));
    }

    Distribution getDistribution();

    void setDistribution(Distribution distribution);

    void claim();

    Future<Void> start();

    void unClaimAndStop();
}

@@ -0,0 +1,130 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.clusterformation;

import org.elasticsearch.GradleServicesAdapter;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.Version;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;

import java.util.Objects;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

public class ElasticsearchNode implements ElasticsearchConfiguration {

    private final String name;
    private final GradleServicesAdapter services;
    private final AtomicInteger noOfClaims = new AtomicInteger();
    private final AtomicBoolean started = new AtomicBoolean(false);
    private final Logger logger = Logging.getLogger(ElasticsearchNode.class);

    private Distribution distribution;
    private Version version;

    public ElasticsearchNode(String name, GradleServicesAdapter services) {
        this.name = name;
        this.services = services;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public Version getVersion() {
        return version;
    }

    @Override
    public void setVersion(Version version) {
        checkNotRunning();
        this.version = version;
    }

    @Override
    public Distribution getDistribution() {
        return distribution;
    }

    @Override
    public void setDistribution(Distribution distribution) {
        checkNotRunning();
        this.distribution = distribution;
    }

    @Override
    public void claim() {
        noOfClaims.incrementAndGet();
    }

    /**
     * Start the cluster if not running. Does nothing if the cluster is already running.
     *
     * @return future of thread running in the background
     */
    @Override
    public Future<Void> start() {
        if (started.getAndSet(true)) {
            logger.lifecycle("Already started cluster: {}", name);
        } else {
            logger.lifecycle("Starting cluster: {}", name);
        }
        return null;
    }

    /**
     * Stops a running cluster if it's not claimed. Does nothing otherwise.
     */
    @Override
    public void unClaimAndStop() {
        int decrementedClaims = noOfClaims.decrementAndGet();
        if (decrementedClaims > 0) {
            logger.lifecycle("Not stopping {}, since cluster still has {} claim(s)", name, decrementedClaims);
            return;
        }
        if (started.get() == false) {
            logger.lifecycle("Asked to unClaimAndStop, but cluster was not running: {}", name);
            return;
        }
        logger.lifecycle("Stopping {}, number of claims is {}", name, decrementedClaims);
    }

    private void checkNotRunning() {
        if (started.get()) {
            throw new IllegalStateException("Configuration cannot be altered while running");
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ElasticsearchNode that = (ElasticsearchNode) o;
        return Objects.equals(name, that.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name);
    }
}
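
For reference, the claim accounting above behaves as in this sketch (not part of the commit; it assumes the Gradle API jar is on the classpath, and passes `null` for the services adapter since these methods never touch it):

[source,java]
----
import org.elasticsearch.gradle.clusterformation.ElasticsearchNode;

public class ClaimLifecycleDemo {
    public static void main(String[] args) {
        // null is fine here: claim/start/unClaimAndStop never use the adapter
        ElasticsearchNode node = new ElasticsearchNode("myTestCluster", null);

        node.claim();          // task user1 is in the task graph
        node.claim();          // task user2 is in the task graph

        node.start();          // "Starting cluster: myTestCluster"
        node.unClaimAndStop(); // "Not stopping myTestCluster, since cluster still has 1 claim(s)"
        node.start();          // "Already started cluster: myTestCluster"
        node.unClaimAndStop(); // "Stopping myTestCluster, number of claims is 0"
    }
}
----
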
@@ -1 +1 @@
4.9
4.10

@@ -153,17 +153,4 @@ public class BuildExamplePluginsIT extends GradleIntegrationTestCase {
        }
    }

    private String getLocalTestRepoPath() {
        String property = System.getProperty("test.local-test-repo-path");
        Objects.requireNonNull(property, "test.local-test-repo-path not passed to tests");
        File file = new File(property);
        assertTrue("Expected " + property + " to exist, but it did not!", file.exists());
        if (File.separator.equals("\\")) {
            // Use / on Windows too, the build script is not happy with \
            return file.getAbsolutePath().replace(File.separator, "/");
        } else {
            return file.getAbsolutePath();
        }
    }

}

@@ -40,7 +40,7 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe
            .withArguments("buildResources", "-s", "-i")
            .withPluginClasspath()
            .build();
        assertTaskSuccessfull(result, ":buildResources");
        assertTaskSuccessful(result, ":buildResources");
        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");

@@ -61,8 +61,8 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe
            .withPluginClasspath()
            .build();

        assertTaskSuccessfull(result, ":buildResources");
        assertTaskSuccessfull(result, ":sampleCopyAll");
        assertTaskSuccessful(result, ":buildResources");
        assertTaskSuccessful(result, ":sampleCopyAll");
        assertBuildFileExists(result, PROJECT_NAME, "sampleCopyAll/checkstyle.xml");
        // This is a side effect of compile time reference
        assertBuildFileExists(result, PROJECT_NAME, "sampleCopyAll/checkstyle_suppressions.xml");

@@ -75,7 +75,7 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe
            .withPluginClasspath()
            .build();

        assertTaskSuccessfull(result, ":sample");
        assertTaskSuccessful(result, ":sample");
        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");
    }

@@ -0,0 +1,144 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.clusterformation;

import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.gradle.testkit.runner.TaskOutcome;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

public class ClusterformationPluginIT extends GradleIntegrationTestCase {

    public void testListClusters() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("listElasticSearchClusters", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SUCCESS, result.task(":listElasticSearchClusters").getOutcome());
        assertOutputContains(
            result.getOutput(),
            " * myTestCluster:"
        );
    }

    public void testUseClusterByOne() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("user1", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SUCCESS, result.task(":user1").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "Starting cluster: myTestCluster",
            "Stopping myTestCluster, number of claims is 0"
        );
    }

    public void testUseClusterByOneWithDryRun() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("user1", "-s", "--dry-run")
            .withPluginClasspath()
            .build();

        assertNull(result.task(":user1"));
        assertOutputDoesNotContain(
            result.getOutput(),
            "Starting cluster: myTestCluster",
            "Stopping myTestCluster, number of claims is 0"
        );
    }

    public void testUseClusterByTwo() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("user1", "user2", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SUCCESS, result.task(":user1").getOutcome());
        assertEquals(TaskOutcome.SUCCESS, result.task(":user2").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "Starting cluster: myTestCluster",
            "Not stopping myTestCluster, since cluster still has 1 claim(s)",
            "Stopping myTestCluster, number of claims is 0"
        );
    }

    public void testUseClusterByUpToDateTask() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("upToDate1", "upToDate2", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.UP_TO_DATE, result.task(":upToDate1").getOutcome());
        assertEquals(TaskOutcome.UP_TO_DATE, result.task(":upToDate2").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "Not stopping myTestCluster, since cluster still has 1 claim(s)",
            "cluster was not running: myTestCluster"
        );
        assertOutputDoesNotContain(result.getOutput(), "Starting cluster: myTestCluster");
    }

    public void testUseClusterBySkippedTask() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("skipped1", "skipped2", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SKIPPED, result.task(":skipped1").getOutcome());
        assertEquals(TaskOutcome.SKIPPED, result.task(":skipped2").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "Not stopping myTestCluster, since cluster still has 1 claim(s)",
            "cluster was not running: myTestCluster"
        );
        assertOutputDoesNotContain(result.getOutput(), "Starting cluster: myTestCluster");
    }

    public void testUseClusterBySkippedAndWorkingTask() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("clusterformation"))
            .withArguments("skipped1", "user1", "-s")
            .withPluginClasspath()
            .build();

        assertEquals(TaskOutcome.SKIPPED, result.task(":skipped1").getOutcome());
        assertEquals(TaskOutcome.SUCCESS, result.task(":user1").getOutcome());
        assertOutputContains(
            result.getOutput(),
            "> Task :user1",
            "Starting cluster: myTestCluster",
            "Stopping myTestCluster, number of claims is 0"
        );
    }

}

@@ -0,0 +1,42 @@
package org.elasticsearch.gradle.precommit;

import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
public class JarHellTaskIT extends GradleIntegrationTestCase {

    public void testJarHellDetected() {
        BuildResult result = GradleRunner.create()
            .withProjectDir(getProjectDir("jarHell"))
            .withArguments("clean", "precommit", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath())
            .withPluginClasspath()
            .buildAndFail();

        assertTaskFailed(result, ":jarHell");
        assertOutputContains(
            result.getOutput(),
            "Exception in thread \"main\" java.lang.IllegalStateException: jar hell!",
            "class: org.apache.logging.log4j.Logger"
        );
    }

}

@@ -9,6 +9,7 @@ import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@@ -66,15 +67,24 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
        }
    }

    protected void assertTaskSuccessfull(BuildResult result, String taskName) {
    protected void assertTaskFailed(BuildResult result, String taskName) {
        assertTaskOutcome(result, taskName, TaskOutcome.FAILED);
    }

    protected void assertTaskSuccessful(BuildResult result, String taskName) {
        assertTaskOutcome(result, taskName, TaskOutcome.SUCCESS);
    }

    private void assertTaskOutcome(BuildResult result, String taskName, TaskOutcome taskOutcome) {
        BuildTask task = result.task(taskName);
        if (task == null) {
            fail("Expected task `" + taskName + "` to be successful, but it did not run");
            fail("Expected task `" + taskName + "` to be " + taskOutcome + ", but it did not run" +
                "\n\nOutput is:\n" + result.getOutput());
        }
        assertEquals(
            "Expected task to be successful but it was: " + task.getOutcome() +
                "\n\nOutput is:\n" + result.getOutput() ,
            TaskOutcome.SUCCESS,
            taskOutcome + "\n\nOutput is:\n" + result.getOutput() ,
            taskOutcome,
            task.getOutcome()
        );
    }

@@ -109,4 +119,17 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
            Files.exists(absPath)
        );
    }

    protected String getLocalTestRepoPath() {
        String property = System.getProperty("test.local-test-repo-path");
        Objects.requireNonNull(property, "test.local-test-repo-path not passed to tests");
        File file = new File(property);
        assertTrue("Expected " + property + " to exist, but it did not!", file.exists());
        if (File.separator.equals("\\")) {
            // Use / on Windows too, the build script is not happy with \
            return file.getAbsolutePath().replace(File.separator, "/");
        } else {
            return file.getAbsolutePath();
        }
    }
}

@@ -0,0 +1,41 @@
plugins {
  id 'elasticsearch.clusterformation'
}

elasticSearchClusters {
  myTestCluster {
    distribution = 'ZIP'
  }
}

task user1 {
  useCluster elasticSearchClusters.myTestCluster
  doLast {
    println "user1 executing"
  }
}

task user2 {
  useCluster elasticSearchClusters.myTestCluster
  doLast {
    println "user2 executing"
  }
}

task upToDate1 {
  useCluster elasticSearchClusters.myTestCluster
}

task upToDate2 {
  useCluster elasticSearchClusters.myTestCluster
}

task skipped1 {
  enabled = false
  useCluster elasticSearchClusters.myTestCluster
}

task skipped2 {
  enabled = false
  useCluster elasticSearchClusters.myTestCluster
}

@@ -0,0 +1,29 @@
plugins {
  id 'java'
  id 'elasticsearch.build'
}

dependencyLicenses.enabled = false
dependenciesInfo.enabled = false
forbiddenApisMain.enabled = false
forbiddenApisTest.enabled = false
thirdPartyAudit.enabled = false
namingConventions.enabled = false
ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")

repositories {
  mavenCentral()
  repositories {
    maven {
      url System.getProperty("local.repo.path")
    }
  }
}

dependencies {
  // Needed for the JarHell task
  testCompile ("org.elasticsearch.test:framework:${versions.elasticsearch}")
  // causes jar hell with local sources
  compile "org.apache.logging.log4j:log4j-api:${versions.log4j}"
}

@@ -0,0 +1,7 @@
package org.apache.logging.log4j;

// Jar Hell !
public class Logger {

}

@@ -90,6 +90,7 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.indexlifecycle.PutLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.DeleteLifecyclePolicyRequest;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;

@@ -1150,10 +1151,10 @@ final class RequestConverters {
    static Request xPackGraphExplore(GraphExploreRequest exploreRequest) throws IOException {
        String endpoint = endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore");
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE));
        request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    }

    static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")

@@ -1436,11 +1437,16 @@ final class RequestConverters {

    Params withRefresh(boolean refresh) {
        if (refresh) {
            return withRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
            return withRefreshPolicy(RefreshPolicy.IMMEDIATE);
        }
        return this;
    }

    /**
     * @deprecated If creating a new HLRC ReST API call, use {@link RefreshPolicy}
     * instead of {@link WriteRequest.RefreshPolicy} from the server project
     */
    @Deprecated
    Params withRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) {
        if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) {
            return putParam("refresh", refreshPolicy.getValue());

@@ -1448,6 +1454,13 @@ final class RequestConverters {
        return this;
    }

    Params withRefreshPolicy(RefreshPolicy refreshPolicy) {
        if (refreshPolicy != RefreshPolicy.NONE) {
            return putParam("refresh", refreshPolicy.getValue());
        }
        return this;
    }

    Params withRetryOnConflict(int retryOnConflict) {
        if (retryOnConflict > 0) {
            return putParam("retry_on_conflict", String.valueOf(retryOnConflict));

@@ -0,0 +1,59 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.security;

/**
 * Enumeration of values that control the refresh policy for a request that
 * supports specifying a refresh policy.
 */
public enum RefreshPolicy {

    /**
     * Don't refresh after this request. The default.
     */
    NONE("false"),
    /**
     * Force a refresh as part of this request. This refresh policy does not scale for high indexing or search throughput but is useful
     * to present a consistent view for indices with very low traffic. And it is wonderful for tests!
     */
    IMMEDIATE("true"),
    /**
     * Leave this request open until a refresh has made the contents of this request visible to search. This refresh policy is
     * compatible with high indexing and search throughput but it causes the request to wait to reply until a refresh occurs.
     */
    WAIT_UNTIL("wait_for");

    private final String value;

    RefreshPolicy(String value) {
        this.value = value;
    }

    public String getValue() {
        return value;
    }

    /**
     * Get the default refresh policy, which is <code>NONE</code>
     */
    public static RefreshPolicy getDefault() {
        return RefreshPolicy.NONE;
    }
}
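
For reference, the enum's `getValue()` strings are what end up in the `refresh` query parameter; a minimal sketch of the mapping performed by the new `withRefreshPolicy(RefreshPolicy)` overload above (the `params` map here is a stand-in for the package-private `Params` helper in `RequestConverters`):

[source,java]
----
import org.elasticsearch.client.security.RefreshPolicy;

import java.util.HashMap;
import java.util.Map;

public class RefreshPolicyDemo {
    public static void main(String[] args) {
        Map<String, String> params = new HashMap<>(); // stand-in for RequestConverters.Params

        RefreshPolicy policy = RefreshPolicy.WAIT_UNTIL;
        if (policy != RefreshPolicy.NONE) {
            // NONE is the default, so no parameter is emitted for it
            params.put("refresh", policy.getValue());
        }

        System.out.println(params); // {refresh=wait_for}
    }
}
----
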
@@ -39,3 +39,9 @@ test {
  // TODO: find a way to add permissions for the tests in this module
  systemProperty 'tests.security.manager', 'false'
}

if (project.inFipsJvm) {
  // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit,
  // rather than provide a long list of exclusions, disable the check on FIPS.
  thirdPartyAudit.enabled = false
}

@@ -1,71 +1,14 @@
[[discovery-file]]
=== File-Based Discovery Plugin

The file-based discovery plugin uses a list of hosts/ports in a `unicast_hosts.txt` file
in the `config/discovery-file` directory for unicast discovery.
The functionality provided by the `discovery-file` plugin is now available in
Elasticsearch without requiring a plugin. This plugin still exists to ensure
backwards compatibility, but it will be removed in a future version.

On installation, this plugin creates a file at
`$ES_PATH_CONF/discovery-file/unicast_hosts.txt` that comprises comments that
describe how to use it. It is preferable not to install this plugin and instead
to create this file, and its containing directory, using standard tools.

:plugin_name: discovery-file
include::install_remove.asciidoc[]

[[discovery-file-usage]]
[float]
==== Using the file-based discovery plugin

The file-based discovery plugin provides the ability to specify the
unicast hosts list through a simple `unicast_hosts.txt` file that can
be dynamically updated at any time. To enable, add the following in `elasticsearch.yml`:

[source,yaml]
----
discovery.zen.hosts_provider: file
----

This plugin simply provides a facility to supply the unicast hosts list for
zen discovery through an external file that can be updated at any time by a side process.

For example, this gives a convenient mechanism for an Elasticsearch instance
that is run in docker containers to be dynamically supplied a list of IP
addresses to connect to for zen discovery when those IP addresses may not be
known at node startup.

Note that the file-based discovery plugin is meant to augment the unicast
hosts list in `elasticsearch.yml` (if specified), not replace it. Therefore,
if there are valid unicast host entries in `discovery.zen.ping.unicast.hosts`,
they will be used in addition to those supplied in `unicast_hosts.txt`.

Anytime a change is made to the `unicast_hosts.txt` file, even as Elasticsearch
continues to run, the new changes will be picked up by the plugin and the
new hosts list will be used for the next pinging round for master election.

Upon installation of the plugin, a default `unicast_hosts.txt` file will
be found in the `$CONFIG_DIR/discovery-file` directory. This default file
will contain some comments about what the file should contain. All comments
for this file must appear on their own lines starting with `#` (i.e. comments
cannot start in the middle of a line).

[[discovery-file-format]]
[float]
==== unicast_hosts.txt file format

The format of the file is to specify one unicast host entry per line.
Each unicast host entry consists of the host (host name or IP address) and
an optional transport port number. If the port number is specified, it must
come immediately after the host (on the same line) separated by a `:`.
If the port number is not specified, a default value of 9300 is used.

For example, this is an example of `unicast_hosts.txt` for a cluster with
four nodes that participate in unicast discovery, some of which are not
running on the default port:

[source,txt]
----------------------------------------------------------------
10.10.10.5
10.10.10.6:9305
10.10.10.5:10005
# an IPv6 address
[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:9301
----------------------------------------------------------------

Host names are allowed instead of IP addresses (similar to
`discovery.zen.ping.unicast.hosts`), and IPv6 addresses must be
specified in brackets with the port coming after the brackets.

@@ -10,71 +10,66 @@ include::install_remove.asciidoc[]
[[repository-gcs-usage]]
==== Getting started

The plugin uses the https://cloud.google.com/storage/docs/json_api/[Google Cloud Storage JSON API] (v1)
to connect to the Storage service. If this is the first time you use Google Cloud Storage, you first
need to connect to the https://console.cloud.google.com/[Google Cloud Platform Console] and create a new
project. Once your project is created, you must enable the Cloud Storage Service for your project.
The plugin uses the https://github.com/GoogleCloudPlatform/google-cloud-java/tree/master/google-cloud-clients/google-cloud-storage[Google Cloud Java Client for Storage]
to connect to the Storage service. If you are using
https://cloud.google.com/storage/[Google Cloud Storage] for the first time, you
must connect to the https://console.cloud.google.com/[Google Cloud Platform Console]
and create a new project. After your project is created, you must enable the
Cloud Storage Service for your project.

[[repository-gcs-creating-bucket]]
===== Creating a Bucket

Google Cloud Storage service uses the concept of https://cloud.google.com/storage/docs/key-terms[Bucket]
as a container for all the data. Buckets are usually created using the
https://console.cloud.google.com/[Google Cloud Platform Console]. The plugin will not automatically
create buckets.
The Google Cloud Storage service uses the concept of a
https://cloud.google.com/storage/docs/key-terms[bucket] as a container for all
the data. Buckets are usually created using the
https://console.cloud.google.com/[Google Cloud Platform Console]. The plugin
does not automatically create buckets.

To create a new bucket:

1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console]
2. Select your project
3. Go to the https://console.cloud.google.com/storage/browser[Storage Browser]
4. Click the "Create Bucket" button
5. Enter the name of the new bucket
6. Select a storage class
7. Select a location
8. Click the "Create" button
1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console].
2. Select your project.
3. Go to the https://console.cloud.google.com/storage/browser[Storage Browser].
4. Click the *Create Bucket* button.
5. Enter the name of the new bucket.
6. Select a storage class.
7. Select a location.
8. Click the *Create* button.

The bucket should now be created.
For more detailed instructions, see the
https://cloud.google.com/storage/docs/quickstart-console#create_a_bucket[Google Cloud documentation].

[[repository-gcs-service-authentication]]
===== Service Authentication

The plugin supports two authentication modes:

* The built-in <<repository-gcs-using-compute-engine, Compute Engine authentication>>. This mode is
recommended if your Elasticsearch node is running on a Compute Engine virtual machine.

* Specifying <<repository-gcs-using-service-account, Service Account>> credentials.

[[repository-gcs-using-compute-engine]]
===== Using Compute Engine
When running on Compute Engine, the plugin uses Google's built-in authentication mechanism to
authenticate on the Storage service. Compute Engine virtual machines are usually associated to a
default service account. This service account can be found in the VM instance details in the
https://console.cloud.google.com/compute/[Compute Engine console].

This is the default authentication mode and requires no configuration.

NOTE: The Compute Engine VM must be allowed to use the Storage service. This can be done only at VM
creation time, when "Storage" access can be configured to "Read/Write" permission. Check your
instance details at the section "Cloud API access scopes".
The plugin must authenticate the requests it makes to the Google Cloud Storage
service. It is common for Google client libraries to employ a strategy named https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application[application default credentials].
However, that strategy is **not** supported for use with Elasticsearch. The
plugin operates under the Elasticsearch process, which runs with the security
manager enabled. The security manager obstructs the "automatic" credential discovery.
Therefore, you must configure <<repository-gcs-using-service-account,service account>>
credentials even if you are using an environment that does not normally require
this configuration (such as Compute Engine, Kubernetes Engine or App Engine).

[[repository-gcs-using-service-account]]
===== Using a Service Account
If your Elasticsearch node is not running on Compute Engine, or if you don't want to use Google's
built-in authentication mechanism, you can authenticate on the Storage service using a
https://cloud.google.com/iam/docs/overview#service_account[Service Account] file.
You have to obtain and provide https://cloud.google.com/iam/docs/overview#service_account[service account credentials]
manually.

To create a service account file:
For detailed information about generating JSON service account files, see the https://cloud.google.com/storage/docs/authentication?hl=en#service_accounts[Google Cloud documentation].
Note that the PKCS12 format is not supported by this plugin.

1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console]
2. Select your project
3. Go to the https://console.cloud.google.com/permissions[Permission] tab
4. Select the https://console.cloud.google.com/permissions/serviceaccounts[Service Accounts] tab
5. Click on "Create service account"
6. Once created, select the new service account and download a JSON key file
Here is a summary of the steps:

A service account file looks like this:
1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console].
2. Select your project.
3. Go to the https://console.cloud.google.com/permissions[Permission] tab.
4. Select the https://console.cloud.google.com/permissions/serviceaccounts[Service Accounts] tab.
5. Click *Create service account*.
6. After the account is created, select it and download a JSON key file.

A JSON service account file looks like this:

[source,js]
----

@@ -84,19 +79,26 @@ A service account file looks like this:
  "private_key_id": "...",
  "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
  "client_email": "service-account-for-your-repository@your-project-id.iam.gserviceaccount.com",
  "client_id": "..."
  "client_id": "...",
  "auth_uri": "https://accounts.google.com/o/oauth2/auth",
  "token_uri": "https://accounts.google.com/o/oauth2/token",
  "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
  "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/your-bucket@your-project-id.iam.gserviceaccount.com"
}
----
// NOTCONSOLE

This file must be stored in the {ref}/secure-settings.html[elasticsearch keystore], under a setting name
of the form `gcs.client.NAME.credentials_file`, where `NAME` is the name of the client configuration.
The default client name is `default`, but a different client name can be specified in repository
settings using `client`.
To provide this file to the plugin, it must be stored in the {ref}/secure-settings.html[Elasticsearch keystore]. You must add a setting name of the form `gcs.client.NAME.credentials_file`, where `NAME`
is the name of the client configuration for the repository. The implicit client
name is `default`, but a different client name can be specified in the
repository settings with the `client` key.

For example, if specifying the credentials file in the keystore under
`gcs.client.my_alternate_client.credentials_file`, you can configure a repository to use these
credentials like this:
NOTE: Passing the file path via the GOOGLE_APPLICATION_CREDENTIALS environment
variable is **not** supported.

For example, if you added a `gcs.client.my_alternate_client.credentials_file`
setting in the keystore, you can configure a repository to use those credentials
like this:

[source,js]
----

|
|||
// TEST[skip:we don't have gcs setup while testing this]
|
||||
|
||||
The `credentials_file` settings are {ref}/secure-settings.html#reloadable-secure-settings[reloadable].
|
||||
After you reload the settings, the internal `gcs` clients, used to transfer the
|
||||
snapshot contents, will utilize the latest settings from the keystore.
|
||||
After you reload the settings, the internal `gcs` clients, which are used to
|
||||
transfer the snapshot contents, utilize the latest settings from the keystore.
|
||||
|
||||
|
||||
NOTE: In progress snapshot/restore jobs will not be preempted by a *reload*
|
||||
of the client's `credentials_file` settings. They will complete using the client
|
||||
as it was built when the operation started.
|
||||
NOTE: Snapshot or restore jobs that are in progress are not preempted by a *reload*
|
||||
of the client's `credentials_file` settings. They complete using the client as
|
||||
it was built when the operation started.
|
||||
|
||||
[[repository-gcs-client]]
|
||||
==== Client Settings
|
||||
|
||||
The client used to connect to Google Cloud Storage has a number of settings available.
|
||||
Client setting names are of the form `gcs.client.CLIENT_NAME.SETTING_NAME` and specified
|
||||
Client setting names are of the form `gcs.client.CLIENT_NAME.SETTING_NAME` and are specified
|
||||
inside `elasticsearch.yml`. The default client name looked up by a `gcs` repository is
|
||||
called `default`, but can be customized with the repository setting `client`.
|
||||
|
||||
|
@ -146,7 +147,7 @@ PUT _snapshot/my_gcs_repository
|
|||
// TEST[skip:we don't have gcs setup while testing this]
|
||||
|
||||
Some settings are sensitive and must be stored in the
|
||||
{ref}/secure-settings.html[elasticsearch keystore]. This is the case for the service account file:
|
||||
{ref}/secure-settings.html[Elasticsearch keystore]. This is the case for the service account file:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
|
@ -185,7 +186,7 @@ are marked as `Secure`.
|
|||
|
||||
`project_id`::
|
||||
|
||||
The Google Cloud project id. This will be automatically infered from the credentials file but
|
||||
The Google Cloud project id. This will be automatically inferred from the credentials file but
|
||||
can be specified explicitly. For example, it can be used to switch between projects when the
|
||||
same credentials are usable for both the production and the development projects.
|
||||
|
||||
|
@ -248,8 +249,8 @@ The following settings are supported:
|
|||
|
||||
The service account used to access the bucket must have the "Writer" access to the bucket:
|
||||
|
||||
1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console]
|
||||
2. Select your project
|
||||
3. Got to the https://console.cloud.google.com/storage/browser[Storage Browser]
|
||||
4. Select the bucket and "Edit bucket permission"
|
||||
5. The service account must be configured as a "User" with "Writer" access
|
||||
1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console].
|
||||
2. Select your project.
|
||||
3. Got to the https://console.cloud.google.com/storage/browser[Storage Browser].
|
||||
4. Select the bucket and "Edit bucket permission".
|
||||
5. The service account must be configured as a "User" with "Writer" access.
|
||||
|
|
|
@@ -1,13 +1,12 @@
[[modules-discovery-zen]]
=== Zen Discovery

The zen discovery is the built in discovery module for Elasticsearch and
the default. It provides unicast discovery, but can be extended to
support cloud environments and other forms of discovery.
Zen discovery is the built-in, default, discovery module for Elasticsearch. It
provides unicast and file-based discovery, and can be extended to support cloud
environments and other forms of discovery via plugins.

The zen discovery is integrated with other modules, for example, all
communication between nodes is done using the
<<modules-transport,transport>> module.
Zen discovery is integrated with other modules, for example, all communication
between nodes is done using the <<modules-transport,transport>> module.

It is separated into several sub modules, which are explained below:

@@ -15,86 +14,155 @@ It is separated into several sub modules, which are explained below:
[[ping]]
==== Ping

This is the process where a node uses the discovery mechanisms to find
other nodes.
This is the process where a node uses the discovery mechanisms to find other
nodes.

[float]
[[discovery-seed-nodes]]
==== Seed nodes

Zen discovery uses a list of _seed_ nodes in order to start off the discovery
process. At startup, or when electing a new master, Elasticsearch tries to
connect to each seed node in its list, and holds a gossip-like conversation with
them to find other nodes and to build a complete picture of the cluster. By
default there are two methods for configuring the list of seed nodes: _unicast_
and _file-based_. It is recommended that the list of seed nodes comprises the
list of master-eligible nodes in the cluster.

[float]
[[unicast]]
===== Unicast

Unicast discovery requires a list of hosts to use that will act as gossip
routers. These hosts can be specified as hostnames or IP addresses; hosts
specified as hostnames are resolved to IP addresses during each round of
pinging. Note that if you are in an environment where DNS resolutions vary with
time, you might need to adjust your <<networkaddress-cache-ttl,JVM security
settings>>.
Unicast discovery configures a static list of hosts for use as seed nodes.
These hosts can be specified as hostnames or IP addresses; hosts specified as
hostnames are resolved to IP addresses during each round of pinging. Note that
if you are in an environment where DNS resolutions vary with time, you might
need to adjust your <<networkaddress-cache-ttl,JVM security settings>>.

It is recommended that the unicast hosts list be maintained as the list of
master-eligible nodes in the cluster.
The list of hosts is set using the `discovery.zen.ping.unicast.hosts` static
setting. This is either an array of hosts or a comma-delimited string. Each
value should be in the form of `host:port` or `host` (where `port` defaults to
the setting `transport.profiles.default.port` falling back to
`transport.tcp.port` if not set). Note that IPv6 hosts must be bracketed. The
default for this setting is `127.0.0.1, [::1]`
||||
|
||||
Unicast discovery provides the following settings with the `discovery.zen.ping.unicast` prefix:
|
||||
Additionally, the `discovery.zen.ping.unicast.resolve_timeout` configures the
|
||||
amount of time to wait for DNS lookups on each round of pinging. This is
|
||||
specified as a <<time-units, time unit>> and defaults to 5s.
|
||||
|
||||
[cols="<,<",options="header",]
|
||||
|=======================================================================
|
||||
|Setting |Description
|
||||
|`hosts` |Either an array setting or a comma delimited setting. Each
|
||||
value should be in the form of `host:port` or `host` (where `port` defaults to the setting `transport.profiles.default.port`
|
||||
falling back to `transport.tcp.port` if not set). Note that IPv6 hosts must be bracketed. Defaults to `127.0.0.1, [::1]`
|
||||
|`hosts.resolve_timeout` |The amount of time to wait for DNS lookups on each round of pinging. Specified as
|
||||
<<time-units, time units>>. Defaults to 5s.
|
||||
|=======================================================================
|
||||
Unicast discovery uses the <<modules-transport,transport>> module to perform the
|
||||
discovery.
|
||||
|
||||
The unicast discovery uses the <<modules-transport,transport>> module to perform the discovery.
|
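For illustration, the hosts list could be set in `elasticsearch.yml` as in the
following sketch (the host names are hypothetical):

[source,yaml]
----------------------------------------------------------------
discovery.zen.ping.unicast.hosts:
   - master-a.example.com
   - master-b.example.com:9301
   - 10.0.0.3
----------------------------------------------------------------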
[float]
[[file-based-hosts-provider]]
===== File-based

In addition to hosts provided by the static `discovery.zen.ping.unicast.hosts`
setting, it is possible to provide a list of hosts via an external file.
Elasticsearch reloads this file when it changes, so that the list of seed nodes
can change dynamically without needing to restart each node. For example, this
gives a convenient mechanism for an Elasticsearch instance that is run in a
Docker container to be dynamically supplied with a list of IP addresses to
connect to for Zen discovery when those IP addresses may not be known at node
startup.

To enable file-based discovery, configure the `file` hosts provider as follows:

[source,txt]
----------------------------------------------------------------
discovery.zen.hosts_provider: file
----------------------------------------------------------------

Then create a file at `$ES_PATH_CONF/unicast_hosts.txt` in the format described
below. Any time a change is made to the `unicast_hosts.txt` file, Elasticsearch
picks up the change and uses the new hosts list.

Note that file-based discovery augments the unicast hosts list in
`elasticsearch.yml`: if there are valid unicast host entries in
`discovery.zen.ping.unicast.hosts` then they will be used in addition to those
supplied in `unicast_hosts.txt`.

The `discovery.zen.ping.unicast.resolve_timeout` setting also applies to DNS
lookups for nodes specified by address via file-based discovery. This is
specified as a <<time-units, time unit>> and defaults to 5s.
The format of the file is to specify one node entry per line. Each node entry
consists of the host (host name or IP address) and an optional transport port
number. If the port number is specified, it must come immediately after the
host (on the same line) separated by a `:`. If the port number is not
specified, a default value of 9300 is used.

For example, here is a `unicast_hosts.txt` for a cluster with four nodes that
participate in unicast discovery, some of which are not running on the default
port:

[source,txt]
----------------------------------------------------------------
10.10.10.5
10.10.10.6:9305
10.10.10.5:10005
# an IPv6 address
[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:9301
----------------------------------------------------------------

Host names are allowed instead of IP addresses (similar to
`discovery.zen.ping.unicast.hosts`), and IPv6 addresses must be specified in
brackets with the port coming after the brackets.

It is also possible to add comments to this file. All comments must appear on
their own lines starting with `#` (i.e. comments cannot start in the middle of
a line).
[float]
[[master-election]]
==== Master Election

As part of the ping process a master of the cluster is either
elected or joined to. This is done automatically. The
`discovery.zen.ping_timeout` (which defaults to `3s`) determines how long the node
will wait before deciding on starting an election or joining an existing cluster.
Three pings will be sent over this timeout interval. In case where no decision can be
reached after the timeout, the pinging process restarts.
In slow or congested networks, three seconds might not be enough for a node to become
aware of the other nodes in its environment before making an election decision.
Increasing the timeout should be done with care in that case, as it will slow down the
election process.
Once a node decides to join an existing formed cluster, it
will send a join request to the master (`discovery.zen.join_timeout`)
with a timeout defaulting at 20 times the ping timeout.
As part of the ping process a master of the cluster is either elected or joined
to. This is done automatically. The `discovery.zen.ping_timeout` (which defaults
to `3s`) determines how long the node will wait before deciding on starting an
election or joining an existing cluster. Three pings will be sent over this
timeout interval. In the case where no decision can be reached after the
timeout, the pinging process restarts. In slow or congested networks, three
seconds might not be enough for a node to become aware of the other nodes in its
environment before making an election decision. Increasing the timeout should
be done with care in that case, as it will slow down the election process. Once
a node decides to join an existing formed cluster, it will send a join request
to the master (`discovery.zen.join_timeout`) with a timeout defaulting to 20
times the ping timeout.

When the master node stops or has encountered a problem, the cluster nodes
start pinging again and will elect a new master. This pinging round also
serves as a protection against (partial) network failures where a node may unjustly
think that the master has failed. In this case the node will simply hear from
other nodes about the currently active master.
When the master node stops or has encountered a problem, the cluster nodes start
pinging again and will elect a new master. This pinging round also serves as a
protection against (partial) network failures where a node may unjustly think
that the master has failed. In this case the node will simply hear from other
nodes about the currently active master.

If `discovery.zen.master_election.ignore_non_master_pings` is `true`, pings from nodes that are not master
eligible (nodes where `node.master` is `false`) are ignored during master election; the default value is
`false`.
If `discovery.zen.master_election.ignore_non_master_pings` is `true`, pings from
nodes that are not master eligible (nodes where `node.master` is `false`) are
ignored during master election; the default value is `false`.

Nodes can be excluded from becoming a master by setting `node.master` to
`false`.

Nodes can be excluded from becoming a master by setting `node.master` to `false`.

The `discovery.zen.minimum_master_nodes` sets the minimum
number of master eligible nodes that need to join a newly elected master in order for an election to
complete and for the elected node to accept its mastership. The same setting controls the minimum number of
active master eligible nodes that should be a part of any active cluster. If this requirement is not met the
active master node will step down and a new master election will begin.
The `discovery.zen.minimum_master_nodes` setting sets the minimum number of
master eligible nodes that need to join a newly elected master in order for an
election to complete and for the elected node to accept its mastership. The same
setting controls the minimum number of active master eligible nodes that should
be a part of any active cluster. If this requirement is not met the active
master node will step down and a new master election will begin.

This setting must be set to a <<minimum_master_nodes,quorum>> of your master
eligible nodes. It is recommended to avoid having only two master eligible
nodes, since a quorum of two is two. Therefore, a loss of either master
eligible node will result in an inoperable cluster.
nodes, since a quorum of two is two. Therefore, a loss of either master eligible
node will result in an inoperable cluster.
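For example, in a cluster with three master-eligible nodes a quorum is two, so
the corresponding `elasticsearch.yml` entry would be:

[source,yaml]
----------------------------------------------------------------
discovery.zen.minimum_master_nodes: 2
----------------------------------------------------------------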
[float]
[[fault-detection]]
==== Fault Detection

There are two fault detection processes running. The first is by the
master, to ping all the other nodes in the cluster and verify that they
are alive. And on the other end, each node pings to master to verify if
its still alive or an election process needs to be initiated.
There are two fault detection processes running. The first is by the master, to
ping all the other nodes in the cluster and verify that they are alive. On the
other end, each node pings the master to verify that it is still alive or
whether an election process needs to be initiated.

The following settings control the fault detection process using the
`discovery.zen.fd` prefix:
@ -116,19 +184,21 @@ considered failed. Defaults to `3`.

The master node is the only node in a cluster that can make changes to the
cluster state. The master node processes one cluster state update at a time,
applies the required changes and publishes the updated cluster state to all
the other nodes in the cluster. Each node receives the publish message, acknowledges
it, but does *not* yet apply it. If the master does not receive acknowledgement from
at least `discovery.zen.minimum_master_nodes` nodes within a certain time (controlled by
the `discovery.zen.commit_timeout` setting and defaults to 30 seconds) the cluster state
change is rejected.
applies the required changes and publishes the updated cluster state to all the
other nodes in the cluster. Each node receives the publish message, acknowledges
it, but does *not* yet apply it. If the master does not receive acknowledgement
from at least `discovery.zen.minimum_master_nodes` nodes within a certain time
(controlled by the `discovery.zen.commit_timeout` setting, which defaults to 30
seconds) the cluster state change is rejected.

Once enough nodes have responded, the cluster state is committed and a message will
be sent to all the nodes. The nodes then proceed to apply the new cluster state to their
internal state. The master node waits for all nodes to respond, up to a timeout, before
going ahead processing the next updates in the queue. The `discovery.zen.publish_timeout` is
set by default to 30 seconds and is measured from the moment the publishing started. Both
timeout settings can be changed dynamically through the <<cluster-update-settings,cluster update settings api>>
Once enough nodes have responded, the cluster state is committed and a message
will be sent to all the nodes. The nodes then proceed to apply the new cluster
state to their internal state. The master node waits for all nodes to respond,
up to a timeout, before going ahead and processing the next updates in the
queue. The `discovery.zen.publish_timeout` is set by default to 30 seconds and
is measured from the moment the publishing started. Both timeout settings can be
changed dynamically through the <<cluster-update-settings,cluster update
settings api>>.
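As an illustration of a dynamic update, both timeouts could be raised through a
cluster settings request like the following sketch (the values are arbitrary):

[source,js]
----------------------------------------------------------------
PUT /_cluster/settings
{
  "transient": {
    "discovery.zen.commit_timeout": "45s",
    "discovery.zen.publish_timeout": "60s"
  }
}
----------------------------------------------------------------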
[float]
[[no-master-block]]

@ -143,10 +213,14 @@ rejected when there is no active master.
The `discovery.zen.no_master_block` setting has two valid options:

[horizontal]
`all`:: All operations on the node--i.e. both read & writes--will be rejected. This also applies for api cluster state
read or write operations, like the get index settings, put mapping and cluster state api.
`write`:: (default) Write operations will be rejected. Read operations will succeed, based on the last known cluster configuration.
This may result in partial reads of stale data as this node may be isolated from the rest of the cluster.
`all`:: All operations on the node--i.e. both reads & writes--will be rejected.
This also applies to api cluster state read or write operations, like the get
index settings, put mapping and cluster state apis.
`write`:: (default) Write operations will be rejected. Read operations will
succeed, based on the last known cluster configuration. This may result in
partial reads of stale data as this node may be isolated from the rest of the
cluster.

The `discovery.zen.no_master_block` setting doesn't apply to nodes-based apis (for example cluster stats, node info and
node stats apis). Requests to these apis will not be blocked and can run on any available node.
The `discovery.zen.no_master_block` setting doesn't apply to nodes-based apis
(for example cluster stats, node info and node stats apis). Requests to these
apis will not be blocked and can run on any available node.
Binary file not shown.

@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.9-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionSha256Sum=39e2d5803bbd5eaf6c8efe07067b0e5a00235e8c71318642b2ed262920b27721
distributionSha256Sum=fc049dcbcb245d5892bebae143bd515a78f6a5a93cec99d489b312dc0ce4aad9
@ -255,6 +255,10 @@ public class JarHell {
    }

    private static void checkClass(Map<String, Path> clazzes, String clazz, Path jarpath) {
        if (clazz.equals("module-info") || clazz.endsWith(".module-info")) {
            // Ignore jigsaw module descriptions
            return;
        }
        Path previous = clazzes.put(clazz, jarpath);
        if (previous != null) {
            if (previous.equals(jarpath)) {
@ -76,6 +76,28 @@ public class JarHellTests extends ESTestCase {
        }
    }

    public void testModuleInfo() throws Exception {
        Path dir = createTempDir();
        JarHell.checkJarHell(
            asSet(
                makeJar(dir, "foo.jar", null, "module-info.class"),
                makeJar(dir, "bar.jar", null, "module-info.class")
            ),
            logger::debug
        );
    }

    public void testModuleInfoPackage() throws Exception {
        Path dir = createTempDir();
        JarHell.checkJarHell(
            asSet(
                makeJar(dir, "foo.jar", null, "foo/bar/module-info.class"),
                makeJar(dir, "bar.jar", null, "foo/bar/module-info.class")
            ),
            logger::debug
        );
    }

    public void testDirsOnClasspath() throws Exception {
        Path dir1 = createTempDir();
        Path dir2 = createTempDir();
@ -28,6 +28,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.elasticsearch.script.ScriptService;

import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException;
import static org.elasticsearch.ingest.ConfigurationUtils.readBooleanProperty;

@ -96,6 +97,13 @@ public final class ForEachProcessor extends AbstractProcessor {
    }

    public static final class Factory implements Processor.Factory {

        private final ScriptService scriptService;

        Factory(ScriptService scriptService) {
            this.scriptService = scriptService;
        }

        @Override
        public ForEachProcessor create(Map<String, Processor.Factory> factories, String tag,
                                       Map<String, Object> config) throws Exception {

@ -107,7 +115,8 @@ public final class ForEachProcessor extends AbstractProcessor {
                throw newConfigurationException(TYPE, tag, "processor", "Must specify exactly one processor type");
            }
            Map.Entry<String, Map<String, Object>> entry = entries.iterator().next();
            Processor processor = ConfigurationUtils.readProcessor(factories, entry.getKey(), entry.getValue());
            Processor processor =
                ConfigurationUtils.readProcessor(factories, scriptService, entry.getKey(), entry.getValue());
            return new ForEachProcessor(tag, field, processor, ignoreMissing);
        }
    }
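For context, the single `processor` entry that `Factory.create` reads above maps
to a pipeline processor definition such as this sketch (the field names are
illustrative; `foreach` and `uppercase` are existing processor types):

[source,txt]
----------------------------------------------------------------
{
  "foreach" : {
    "field" : "values",
    "processor" : {
      "uppercase" : {
        "field" : "_ingest._value"
      }
    }
  }
}
----------------------------------------------------------------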
@ -72,7 +72,7 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl
        processors.put(ConvertProcessor.TYPE, new ConvertProcessor.Factory());
        processors.put(GsubProcessor.TYPE, new GsubProcessor.Factory());
        processors.put(FailProcessor.TYPE, new FailProcessor.Factory(parameters.scriptService));
        processors.put(ForEachProcessor.TYPE, new ForEachProcessor.Factory());
        processors.put(ForEachProcessor.TYPE, new ForEachProcessor.Factory(parameters.scriptService));
        processors.put(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory(parameters.scriptService));
        processors.put(SortProcessor.TYPE, new SortProcessor.Factory());
        processors.put(GrokProcessor.TYPE, new GrokProcessor.Factory(GROK_PATTERNS, createGrokThreadWatchdog(parameters)));
@ -22,6 +22,7 @@ package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.TestProcessor;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;

@ -30,14 +31,17 @@ import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;

public class ForEachProcessorFactoryTests extends ESTestCase {

    private final ScriptService scriptService = mock(ScriptService.class);

    public void testCreate() throws Exception {
        Processor processor = new TestProcessor(ingestDocument -> { });
        Map<String, Processor.Factory> registry = new HashMap<>();
        registry.put("_name", (r, t, c) -> processor);
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory();
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(scriptService);

        Map<String, Object> config = new HashMap<>();
        config.put("field", "_field");

@ -53,7 +57,7 @@ public class ForEachProcessorFactoryTests extends ESTestCase {
        Processor processor = new TestProcessor(ingestDocument -> { });
        Map<String, Processor.Factory> registry = new HashMap<>();
        registry.put("_name", (r, t, c) -> processor);
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory();
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(scriptService);

        Map<String, Object> config = new HashMap<>();
        config.put("field", "_field");

@ -71,7 +75,7 @@ public class ForEachProcessorFactoryTests extends ESTestCase {
        Map<String, Processor.Factory> registry = new HashMap<>();
        registry.put("_first", (r, t, c) -> processor);
        registry.put("_second", (r, t, c) -> processor);
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory();
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(scriptService);

        Map<String, Object> config = new HashMap<>();
        config.put("field", "_field");

@ -84,7 +88,7 @@ public class ForEachProcessorFactoryTests extends ESTestCase {
    }

    public void testCreateWithNonExistingProcessorType() throws Exception {
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory();
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(scriptService);
        Map<String, Object> config = new HashMap<>();
        config.put("field", "_field");
        config.put("processor", Collections.singletonMap("_name", Collections.emptyMap()));

@ -97,7 +101,7 @@ public class ForEachProcessorFactoryTests extends ESTestCase {
        Processor processor = new TestProcessor(ingestDocument -> { });
        Map<String, Processor.Factory> registry = new HashMap<>();
        registry.put("_name", (r, t, c) -> processor);
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory();
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(scriptService);
        Map<String, Object> config = new HashMap<>();
        config.put("processor", Collections.singletonList(Collections.singletonMap("_name", Collections.emptyMap())));
        Exception exception = expectThrows(Exception.class, () -> forEachFactory.create(registry, null, config));

@ -105,7 +109,7 @@ public class ForEachProcessorFactoryTests extends ESTestCase {
    }

    public void testCreateWithMissingProcessor() {
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory();
        ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(scriptService);
        Map<String, Object> config = new HashMap<>();
        config.put("field", "_field");
        Exception exception = expectThrows(Exception.class, () -> forEachFactory.create(Collections.emptyMap(), null, config));
@ -0,0 +1,81 @@
---
teardown:
  - do:
      ingest.delete_pipeline:
        id: "my_pipeline"
        ignore: 404

---
"Test conditional processor fulfilled condition":
  - do:
      ingest.put_pipeline:
        id: "my_pipeline"
        body: >
          {
            "description": "_description",
            "processors": [
              {
                "bytes" : {
                  "if" : "ctx.conditional_field == 'bar'",
                  "field" : "bytes_source_field",
                  "target_field" : "bytes_target_field"
                }
              }
            ]
          }
  - match: { acknowledged: true }

  - do:
      index:
        index: test
        type: test
        id: 1
        pipeline: "my_pipeline"
        body: {bytes_source_field: "1kb", conditional_field: "bar"}

  - do:
      get:
        index: test
        type: test
        id: 1
  - match: { _source.bytes_source_field: "1kb" }
  - match: { _source.conditional_field: "bar" }
  - match: { _source.bytes_target_field: 1024 }

---
"Test conditional processor unfulfilled condition":
  - do:
      ingest.put_pipeline:
        id: "my_pipeline"
        body: >
          {
            "description": "_description",
            "processors": [
              {
                "bytes" : {
                  "if" : "ctx.conditional_field == 'foo'",
                  "field" : "bytes_source_field",
                  "target_field" : "bytes_target_field"
                }
              }
            ]
          }
  - match: { acknowledged: true }

  - do:
      index:
        index: test
        type: test
        id: 1
        pipeline: "my_pipeline"
        body: {bytes_source_field: "1kb", conditional_field: "bar"}

  - do:
      get:
        index: test
        type: test
        id: 1
  - match: { _source.bytes_source_field: "1kb" }
  - match: { _source.conditional_field: "bar" }
  - is_false: _source.bytes_target_field
@ -61,9 +61,12 @@ public final class Whitelist {
    /** The {@link List} of all the whitelisted Painless classes. */
    public final List<WhitelistClass> whitelistClasses;

    /** The {@link List} of all the whitelisted Painless bindings. */
    public final List<WhitelistBinding> whitelistBindings;

    /** Standard constructor. All values must be not {@code null}. */
    public Whitelist(ClassLoader classLoader, List<WhitelistClass> whitelistClasses) {
    public Whitelist(ClassLoader classLoader, List<WhitelistClass> whitelistClasses, List<WhitelistBinding> whitelistBindings) {
        this.classLoader = Objects.requireNonNull(classLoader);
        this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses));
        this.whitelistBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistBindings));
    }
}
@ -0,0 +1,67 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.painless.spi;

import java.util.List;
import java.util.Objects;

/**
 * A binding represents a method call that stores state. Each binding class must have exactly one
 * public constructor and one public method excluding those inherited directly from {@link Object}.
 * The canonical type name parameters provided must match those of the constructor and method combined.
 * The constructor for a binding class will be called when the binding method is called for the first
 * time at which point state may be stored for the arguments passed into the constructor. The method
 * for a binding class will be called each time the binding method is called and may use the previously
 * stored state.
 */
public class WhitelistBinding {

    /** Information about where this constructor was whitelisted from. */
    public final String origin;

    /** The Java class name this binding represents. */
    public final String targetJavaClassName;

    /** The method name for this binding. */
    public final String methodName;

    /**
     * The canonical type name for the return type.
     */
    public final String returnCanonicalTypeName;

    /**
     * A {@link List} of {@link String}s that are the Painless type names for the parameters of the
     * constructor which can be used to look up the Java constructor through reflection.
     */
    public final List<String> canonicalTypeNameParameters;

    /** Standard constructor. All values must be not {@code null}. */
    public WhitelistBinding(String origin, String targetJavaClassName,
            String methodName, String returnCanonicalTypeName, List<String> canonicalTypeNameParameters) {

        this.origin = Objects.requireNonNull(origin);
        this.targetJavaClassName = Objects.requireNonNull(targetJavaClassName);

        this.methodName = Objects.requireNonNull(methodName);
        this.returnCanonicalTypeName = Objects.requireNonNull(returnCanonicalTypeName);
        this.canonicalTypeNameParameters = Objects.requireNonNull(canonicalTypeNameParameters);
    }
}
@ -62,9 +62,8 @@ public final class WhitelistClass {

    /** Standard constructor. All values must be not {@code null}. */
    public WhitelistClass(String origin, String javaClassName, boolean noImport,
            List<WhitelistConstructor> whitelistConstructors,
            List<WhitelistMethod> whitelistMethods,
            List<WhitelistField> whitelistFields) {
            List<WhitelistConstructor> whitelistConstructors, List<WhitelistMethod> whitelistMethods, List<WhitelistField> whitelistFields)
            {

        this.origin = Objects.requireNonNull(origin);
        this.javaClassName = Objects.requireNonNull(javaClassName);
@ -133,6 +133,7 @@ public final class WhitelistLoader {
     */
    public static Whitelist loadFromResourceFiles(Class<?> resource, String... filepaths) {
        List<WhitelistClass> whitelistClasses = new ArrayList<>();
        List<WhitelistBinding> whitelistBindings = new ArrayList<>();

        // Execute a single pass through the whitelist text files. This will gather all the
        // constructors, methods, augmented methods, and fields for each whitelisted class.

@ -141,8 +142,9 @@ public final class WhitelistLoader {
            int number = -1;

            try (LineNumberReader reader = new LineNumberReader(
                new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8))) {
                    new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8))) {

                String parseType = null;
                String whitelistClassOrigin = null;
                String javaClassName = null;
                boolean noImport = false;

@ -165,7 +167,11 @@ public final class WhitelistLoader {
                        // Ensure the final token of the line is '{'.
                        if (line.endsWith("{") == false) {
                            throw new IllegalArgumentException(
                                "invalid class definition: failed to parse class opening bracket [" + line + "]");
                                    "invalid class definition: failed to parse class opening bracket [" + line + "]");
                        }

                        if (parseType != null) {
                            throw new IllegalArgumentException("invalid definition: cannot embed class definition [" + line + "]");
                        }

                        // Parse the Java class name.

@ -178,6 +184,7 @@ public final class WhitelistLoader {
                            throw new IllegalArgumentException("invalid class definition: failed to parse class name [" + line + "]");
                        }

                        parseType = "class";
                        whitelistClassOrigin = "[" + filepath + "]:[" + number + "]";
                        javaClassName = tokens[0];
@ -185,34 +192,117 @@
                        whitelistConstructors = new ArrayList<>();
                        whitelistMethods = new ArrayList<>();
                        whitelistFields = new ArrayList<>();

                    // Handle the end of a class, by creating a new WhitelistClass with all the previously gathered
                    // constructors, methods, augmented methods, and fields, and adding it to the list of whitelisted classes.
                    // Expects the following format: '}' '\n'
                    } else if (line.equals("}")) {
                        if (javaClassName == null) {
                            throw new IllegalArgumentException("invalid class definition: extraneous closing bracket");
                    } else if (line.startsWith("static ")) {
                        // Ensure the final token of the line is '{'.
                        if (line.endsWith("{") == false) {
                            throw new IllegalArgumentException(
                                    "invalid static definition: failed to parse static opening bracket [" + line + "]");
                        }

                        whitelistClasses.add(new WhitelistClass(whitelistClassOrigin, javaClassName, noImport,
                                whitelistConstructors, whitelistMethods, whitelistFields));
                        if (parseType != null) {
                            throw new IllegalArgumentException("invalid definition: cannot embed static definition [" + line + "]");
                        }

                        // Set all the variables to null to ensure a new class definition is found before other parsable values.
                        whitelistClassOrigin = null;
                        javaClassName = null;
                        noImport = false;
                        whitelistConstructors = null;
                        whitelistMethods = null;
                        whitelistFields = null;
                        parseType = "static";

                    // Handle all other valid cases.
                    } else {
                    // Handle the end of a definition and reset all previously gathered values.
                    // Expects the following format: '}' '\n'
                    } else if (line.equals("}")) {
                        if (parseType == null) {
                            throw new IllegalArgumentException("invalid definition: extraneous closing bracket");
                        }

                        // Create a new WhitelistClass with all the previously gathered constructors, methods,
                        // augmented methods, and fields, and add it to the list of whitelisted classes.
                        if ("class".equals(parseType)) {
                            whitelistClasses.add(new WhitelistClass(whitelistClassOrigin, javaClassName, noImport,
                                    whitelistConstructors, whitelistMethods, whitelistFields));

                            whitelistClassOrigin = null;
                            javaClassName = null;
                            noImport = false;
                            whitelistConstructors = null;
                            whitelistMethods = null;
                            whitelistFields = null;
                        }

                        // Reset the parseType.
                        parseType = null;

                    // Handle static definition types.
                    // Expects the following format: ID ID '(' ( ID ( ',' ID )* )? ')' 'bound_to' ID '\n'
                    } else if ("static".equals(parseType)) {
                        // Mark the origin of this parsable object.
                        String origin = "[" + filepath + "]:[" + number + "]";

                        // Parse the tokens prior to the method parameters.
                        int parameterStartIndex = line.indexOf('(');

                        if (parameterStartIndex == -1) {
                            throw new IllegalArgumentException(
                                    "illegal static definition: start of method parameters not found [" + line + "]");
                        }

                        String[] tokens = line.substring(0, parameterStartIndex).trim().split("\\s+");

                        String methodName;

                        // Based on the number of tokens, look up the Java method name.
                        if (tokens.length == 2) {
                            methodName = tokens[1];
                        } else {
                            throw new IllegalArgumentException("invalid method definition: unexpected format [" + line + "]");
                        }

                        String returnCanonicalTypeName = tokens[0];

                        // Parse the method parameters.
                        int parameterEndIndex = line.indexOf(')');

                        if (parameterEndIndex == -1) {
                            throw new IllegalArgumentException(
                                    "illegal static definition: end of method parameters not found [" + line + "]");
                        }

                        String[] canonicalTypeNameParameters =
                                line.substring(parameterStartIndex + 1, parameterEndIndex).replaceAll("\\s+", "").split(",");

                        // Handle the case for a method with no parameters.
                        if ("".equals(canonicalTypeNameParameters[0])) {
                            canonicalTypeNameParameters = new String[0];
                        }

                        // Parse the static type and class.
                        tokens = line.substring(parameterEndIndex + 1).trim().split("\\s+");

                        String staticType;
                        String targetJavaClassName;

                        // Based on the number of tokens, look up the type and class.
                        if (tokens.length == 2) {
                            staticType = tokens[0];
                            targetJavaClassName = tokens[1];
                        } else {
                            throw new IllegalArgumentException("invalid static definition: unexpected format [" + line + "]");
                        }

                        // Check the static type is valid.
                        if ("bound_to".equals(staticType) == false) {
                            throw new IllegalArgumentException(
                                    "invalid static definition: unexpected static type [" + staticType + "] [" + line + "]");
                        }

                        whitelistBindings.add(new WhitelistBinding(origin, targetJavaClassName,
                                methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters)));

                    // Handle class definition types.
                    } else if ("class".equals(parseType)) {
                        // Mark the origin of this parsable object.
                        String origin = "[" + filepath + "]:[" + number + "]";

                        // Ensure we have a defined class before adding any constructors, methods, augmented methods, or fields.
                        if (javaClassName == null) {
                            throw new IllegalArgumentException("invalid object definition: expected a class name [" + line + "]");
                        if (parseType == null) {
                            throw new IllegalArgumentException("invalid definition: expected one of ['class', 'static'] [" + line + "]");
                        }

                        // Handle the case for a constructor definition.
@ -221,7 +311,7 @@ public final class WhitelistLoader {
                            // Ensure the final token of the line is ')'.
                            if (line.endsWith(")") == false) {
                                throw new IllegalArgumentException(
                                    "invalid constructor definition: expected a closing parenthesis [" + line + "]");
                                        "invalid constructor definition: expected a closing parenthesis [" + line + "]");
                            }

                            // Parse the constructor parameters.

@ -234,34 +324,34 @@ public final class WhitelistLoader {

                            whitelistConstructors.add(new WhitelistConstructor(origin, Arrays.asList(tokens)));

                        // Handle the case for a method or augmented method definition.
                        // Expects the following format: ID ID? ID '(' ( ID ( ',' ID )* )? ')' '\n'
                            // Handle the case for a method or augmented method definition.
                            // Expects the following format: ID ID? ID '(' ( ID ( ',' ID )* )? ')' '\n'
                        } else if (line.contains("(")) {
                            // Ensure the final token of the line is ')'.
                            if (line.endsWith(")") == false) {
                                throw new IllegalArgumentException(
                                    "invalid method definition: expected a closing parenthesis [" + line + "]");
                                        "invalid method definition: expected a closing parenthesis [" + line + "]");
                            }

                            // Parse the tokens prior to the method parameters.
                            int parameterIndex = line.indexOf('(');
                            String[] tokens = line.trim().substring(0, parameterIndex).split("\\s+");
                            String[] tokens = line.substring(0, parameterIndex).trim().split("\\s+");

                            String javaMethodName;
                            String methodName;
                            String javaAugmentedClassName;

                            // Based on the number of tokens, look up the Java method name and if provided the Java augmented class.
                            if (tokens.length == 2) {
                                javaMethodName = tokens[1];
                                methodName = tokens[1];
                                javaAugmentedClassName = null;
                            } else if (tokens.length == 3) {
                                javaMethodName = tokens[2];
                                methodName = tokens[2];
                                javaAugmentedClassName = tokens[1];
                            } else {
                                throw new IllegalArgumentException("invalid method definition: unexpected format [" + line + "]");
                            }

                            String painlessReturnTypeName = tokens[0];
                            String returnCanonicalTypeName = tokens[0];

                            // Parse the method parameters.
                            tokens = line.substring(parameterIndex + 1, line.length() - 1).replaceAll("\\s+", "").split(",");

@ -271,11 +361,11 @@ public final class WhitelistLoader {
                                tokens = new String[0];
                            }

                            whitelistMethods.add(new WhitelistMethod(origin, javaAugmentedClassName, javaMethodName,
                                painlessReturnTypeName, Arrays.asList(tokens)));
                            whitelistMethods.add(new WhitelistMethod(origin, javaAugmentedClassName, methodName,
                                    returnCanonicalTypeName, Arrays.asList(tokens)));

                        // Handle the case for a field definition.
                        // Expects the following format: ID ID '\n'
                            // Handle the case for a field definition.
                            // Expects the following format: ID ID '\n'
                        } else {
                            // Parse the field tokens.
                            String[] tokens = line.split("\\s+");

@ -287,20 +377,23 @@ public final class WhitelistLoader {

                            whitelistFields.add(new WhitelistField(origin, tokens[1], tokens[0]));
                        }
                    } else {
                        throw new IllegalArgumentException("invalid definition: unable to parse line [" + line + "]");
                    }
                }

                // Ensure all classes end with a '}' token before the end of the file.
                if (javaClassName != null) {
                    throw new IllegalArgumentException("invalid class definition: expected closing bracket");
                    throw new IllegalArgumentException("invalid definition: expected closing bracket");
                }
            } catch (Exception exception) {
                throw new RuntimeException("error in [" + filepath + "] at line [" + number + "]", exception);
            }
        }

        ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>)resource::getClassLoader);

        return new Whitelist(loader, whitelistClasses);
        return new Whitelist(loader, whitelistClasses, whitelistBindings);
    }

    private WhitelistLoader() {}
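To make the new `static ... bound_to` syntax concrete, a whitelist resource
file entry for the `BindingTest` class added later in this change might look
like the following sketch; note how the parameter list combines the
constructor's `(int, int)` with the method's `(int, double)`:

[source,txt]
----------------------------------------------------------------
# hypothetical whitelist entry for a binding
static {
  int testAddWithState(int, int, int, double) bound_to org.elasticsearch.painless.BindingTest
}
----------------------------------------------------------------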
@ -67,7 +67,8 @@ public class WhitelistMethod {
     * is augmented as described in the class documentation.
     */
    public WhitelistMethod(String origin, String augmentedCanonicalClassName, String methodName,
            String returnCanonicalTypeName, List<String> canonicalTypeNameParameters) {
                           String returnCanonicalTypeName, List<String> canonicalTypeNameParameters) {

        this.origin = Objects.requireNonNull(origin);
        this.augmentedCanonicalClassName = augmentedCanonicalClassName;
        this.methodName = methodName;
@ -0,0 +1,32 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.painless;

public class BindingTest {
    public int state;

    public BindingTest(int state0, int state1) {
        this.state = state0 + state1;
    }

    public int testAddWithState(int istateless, double dstateless) {
        return istateless + state + (int)dstateless;
    }
}
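Assuming the hypothetical whitelist entry sketched earlier, a Painless script
could then call the bound method directly; the first two arguments are captured
once by the constructor and the remaining two are passed on every call. A rough
sketch of the behaviour:

[source,txt]
----------------------------------------------------------------
// hypothetical Painless script using the binding
int total = 0;
for (int i = 0; i < 3; ++i) {
    // state (1 + 2 = 3) is created on the first call and reused afterwards
    total += testAddWithState(1, 2, i, 0.0);
}
return total; // (3 + 0) + (3 + 1) + (3 + 2) = 12
----------------------------------------------------------------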
@ -31,6 +31,7 @@ import java.util.Map;
public class Globals {
    private final Map<String,SFunction> syntheticMethods = new HashMap<>();
    private final Map<String,Constant> constantInitializers = new HashMap<>();
    private final Map<String,Class<?>> bindings = new HashMap<>();
    private final BitSet statements;

    /** Create a new Globals from the set of statement boundaries */

@ -54,7 +55,15 @@ public class Globals {
            throw new IllegalStateException("constant initializer: " + constant.name + " already exists");
        }
    }

    /** Adds a new binding to be written as a local variable */
    public String addBinding(Class<?> type) {
        String name = "$binding$" + bindings.size();
        bindings.put(name, type);

        return name;
    }

    /** Returns the current synthetic methods */
    public Map<String,SFunction> getSyntheticMethods() {
        return syntheticMethods;

@ -64,7 +73,12 @@ public class Globals {
    public Map<String,Constant> getConstantInitializers() {
        return constantInitializers;
    }

    /** Returns the current bindings */
    public Map<String,Class<?>> getBindings() {
        return bindings;
    }

    /** Returns the set of statement boundaries */
    public BitSet getStatements() {
        return statements;
@ -0,0 +1,41 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.painless.lookup;

import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.List;

public class PainlessBinding {

    public final Constructor<?> javaConstructor;
    public final Method javaMethod;

    public final Class<?> returnType;
    public final List<Class<?>> typeParameters;

    PainlessBinding(Constructor<?> javaConstructor, Method javaMethod, Class<?> returnType, List<Class<?>> typeParameters) {
        this.javaConstructor = javaConstructor;
        this.javaMethod = javaMethod;

        this.returnType = returnType;
        this.typeParameters = typeParameters;
    }
}
@ -24,6 +24,7 @@ import java.util.Collections;
import java.util.Map;

public final class PainlessClass {

    public final Map<String, PainlessConstructor> constructors;

    public final Map<String, PainlessMethod> staticMethods;

@ -24,6 +24,7 @@ import java.util.HashMap;
import java.util.Map;

final class PainlessClassBuilder {

    final Map<String, PainlessConstructor> constructors;

    final Map<String, PainlessMethod> staticMethods;

@ -25,6 +25,7 @@ import java.lang.reflect.Constructor;
import java.util.List;

public class PainlessConstructor {

    public final Constructor<?> javaConstructor;
    public final List<Class<?>> typeParameters;
    public final MethodHandle methodHandle;

@ -23,6 +23,7 @@ import java.lang.invoke.MethodHandle;
import java.lang.reflect.Field;

public final class PainlessField {

    public final Field javaField;
    public final Class<?> typeParameter;
@ -37,12 +37,17 @@ public final class PainlessLookup {
    private final Map<String, Class<?>> canonicalClassNamesToClasses;
    private final Map<Class<?>, PainlessClass> classesToPainlessClasses;

    PainlessLookup(Map<String, Class<?>> canonicalClassNamesToClasses, Map<Class<?>, PainlessClass> classesToPainlessClasses) {
    private final Map<String, PainlessBinding> painlessMethodKeysToPainlessBindings;

    PainlessLookup(Map<String, Class<?>> canonicalClassNamesToClasses, Map<Class<?>, PainlessClass> classesToPainlessClasses,
            Map<String, PainlessBinding> painlessMethodKeysToPainlessBindings) {
        Objects.requireNonNull(canonicalClassNamesToClasses);
        Objects.requireNonNull(classesToPainlessClasses);

        this.canonicalClassNamesToClasses = Collections.unmodifiableMap(canonicalClassNamesToClasses);
        this.classesToPainlessClasses = Collections.unmodifiableMap(classesToPainlessClasses);

        this.painlessMethodKeysToPainlessBindings = Collections.unmodifiableMap(painlessMethodKeysToPainlessBindings);
    }

    public boolean isValidCanonicalClassName(String canonicalClassName) {

@ -162,6 +167,14 @@ public final class PainlessLookup {
        return painlessField;
    }

    public PainlessBinding lookupPainlessBinding(String methodName, int arity) {
        Objects.requireNonNull(methodName);

        String painlessMethodKey = buildPainlessMethodKey(methodName, arity);

        return painlessMethodKeysToPainlessBindings.get(painlessMethodKey);
    }

    public PainlessMethod lookupFunctionalInterfacePainlessMethod(Class<?> targetClass) {
        PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass);
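For context, `buildPainlessMethodKey` joins the method name and arity, so the
binding sketched earlier would be resolved roughly like this (a hypothetical
caller, not part of this change):

[source,java]
----------------------------------------------------------------
// the key built internally is "testAddWithState/4" (name plus arity)
PainlessBinding binding = painlessLookup.lookupPainlessBinding("testAddWithState", 4);
if (binding != null) {
    // binding.javaConstructor captures (int, int); binding.javaMethod takes (int, double)
}
----------------------------------------------------------------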
@ -20,6 +20,7 @@
package org.elasticsearch.painless.lookup;

import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistBinding;
import org.elasticsearch.painless.spi.WhitelistClass;
import org.elasticsearch.painless.spi.WhitelistConstructor;
import org.elasticsearch.painless.spi.WhitelistField;

@ -52,11 +53,11 @@ public final class PainlessLookupBuilder {

    private static class PainlessConstructorCacheKey {

        private final Class<?> targetType;
        private final Class<?> targetClass;
        private final List<Class<?>> typeParameters;

        private PainlessConstructorCacheKey(Class<?> targetType, List<Class<?>> typeParameters) {
            this.targetType = targetType;
        private PainlessConstructorCacheKey(Class<?> targetClass, List<Class<?>> typeParameters) {
            this.targetClass = targetClass;
            this.typeParameters = Collections.unmodifiableList(typeParameters);
        }

@ -72,25 +73,27 @@ public final class PainlessLookupBuilder {

            PainlessConstructorCacheKey that = (PainlessConstructorCacheKey)object;

            return Objects.equals(targetType, that.targetType) &&
            return Objects.equals(targetClass, that.targetClass) &&
                Objects.equals(typeParameters, that.typeParameters);
        }

        @Override
        public int hashCode() {
            return Objects.hash(targetType, typeParameters);
            return Objects.hash(targetClass, typeParameters);
        }
    }

    private static class PainlessMethodCacheKey {

        private final Class<?> targetType;
        private final Class<?> targetClass;
        private final String methodName;
        private final Class<?> returnType;
        private final List<Class<?>> typeParameters;

        private PainlessMethodCacheKey(Class<?> targetType, String methodName, List<Class<?>> typeParameters) {
            this.targetType = targetType;
        private PainlessMethodCacheKey(Class<?> targetClass, String methodName, Class<?> returnType, List<Class<?>> typeParameters) {
            this.targetClass = targetClass;
            this.methodName = methodName;
            this.returnType = returnType;
            this.typeParameters = Collections.unmodifiableList(typeParameters);
        }

@ -106,25 +109,26 @@ public final class PainlessLookupBuilder {

            PainlessMethodCacheKey that = (PainlessMethodCacheKey)object;

            return Objects.equals(targetType, that.targetType) &&
            return Objects.equals(targetClass, that.targetClass) &&
                Objects.equals(methodName, that.methodName) &&
                Objects.equals(returnType, that.returnType) &&
                Objects.equals(typeParameters, that.typeParameters);
        }

        @Override
        public int hashCode() {
            return Objects.hash(targetType, methodName, typeParameters);
            return Objects.hash(targetClass, methodName, returnType, typeParameters);
        }
    }

    private static class PainlessFieldCacheKey {

        private final Class<?> targetType;
        private final Class<?> targetClass;
        private final String fieldName;
        private final Class<?> typeParameter;

        private PainlessFieldCacheKey(Class<?> targetType, String fieldName, Class<?> typeParameter) {
            this.targetType = targetType;
        private PainlessFieldCacheKey(Class<?> targetClass, String fieldName, Class<?> typeParameter) {
            this.targetClass = targetClass;
            this.fieldName = fieldName;
            this.typeParameter = typeParameter;
        }

@ -141,20 +145,61 @@ public final class PainlessLookupBuilder {

            PainlessFieldCacheKey that = (PainlessFieldCacheKey) object;

            return Objects.equals(targetType, that.targetType) &&
            return Objects.equals(targetClass, that.targetClass) &&
                Objects.equals(fieldName, that.fieldName) &&
                Objects.equals(typeParameter, that.typeParameter);
        }

        @Override
        public int hashCode() {
            return Objects.hash(targetType, fieldName, typeParameter);
            return Objects.hash(targetClass, fieldName, typeParameter);
        }
    }

    private static final Map<PainlessConstructorCacheKey, PainlessConstructor> painlessConstuctorCache = new HashMap<>();
    private static final Map<PainlessMethodCacheKey, PainlessMethod> painlessMethodCache = new HashMap<>();
    private static final Map<PainlessFieldCacheKey, PainlessField> painlessFieldCache = new HashMap<>();
    private static class PainlessBindingCacheKey {

        private final Class<?> targetClass;
        private final String methodName;
        private final Class<?> methodReturnType;
        private final List<Class<?>> methodTypeParameters;

        private PainlessBindingCacheKey(Class<?> targetClass,
                String methodName, Class<?> returnType, List<Class<?>> typeParameters) {

            this.targetClass = targetClass;
            this.methodName = methodName;
            this.methodReturnType = returnType;
            this.methodTypeParameters = Collections.unmodifiableList(typeParameters);
        }

        @Override
        public boolean equals(Object object) {
            if (this == object) {
                return true;
            }

            if (object == null || getClass() != object.getClass()) {
                return false;
            }

            PainlessBindingCacheKey that = (PainlessBindingCacheKey)object;

            return Objects.equals(targetClass, that.targetClass) &&
                Objects.equals(methodName, that.methodName) &&
                Objects.equals(methodReturnType, that.methodReturnType) &&
                Objects.equals(methodTypeParameters, that.methodTypeParameters);
        }

        @Override
        public int hashCode() {
            return Objects.hash(targetClass, methodName, methodReturnType, methodTypeParameters);
        }
    }

    private static final Map<PainlessConstructorCacheKey, PainlessConstructor> painlessConstructorCache = new HashMap<>();
    private static final Map<PainlessMethodCacheKey, PainlessMethod> painlessMethodCache = new HashMap<>();
    private static final Map<PainlessFieldCacheKey, PainlessField> painlessFieldCache = new HashMap<>();
    private static final Map<PainlessBindingCacheKey, PainlessBinding> painlessBindingCache = new HashMap<>();

    private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$");
    private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$");

@ -197,6 +242,14 @@ public final class PainlessLookupBuilder {
                        targetCanonicalClassName, whitelistField.fieldName, whitelistField.canonicalTypeNameParameter);
                }
            }

            for (WhitelistBinding whitelistBinding : whitelist.whitelistBindings) {
                origin = whitelistBinding.origin;
                painlessLookupBuilder.addPainlessBinding(
                    whitelist.classLoader, whitelistBinding.targetJavaClassName,
                    whitelistBinding.methodName, whitelistBinding.returnCanonicalTypeName,
                    whitelistBinding.canonicalTypeNameParameters);
            }
        }
    } catch (Exception exception) {
        throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);

@ -208,9 +261,13 @@ public final class PainlessLookupBuilder {
    private final Map<String, Class<?>> canonicalClassNamesToClasses;
    private final Map<Class<?>, PainlessClassBuilder> classesToPainlessClassBuilders;

    private final Map<String, PainlessBinding> painlessMethodKeysToPainlessBindings;

    public PainlessLookupBuilder() {
        canonicalClassNamesToClasses = new HashMap<>();
        classesToPainlessClassBuilders = new HashMap<>();

        painlessMethodKeysToPainlessBindings = new HashMap<>();
    }

    private Class<?> canonicalTypeNameToType(String canonicalTypeName) {

@ -392,7 +449,7 @@ public final class PainlessLookupBuilder {

        MethodType methodType = methodHandle.type();

        painlessConstructor = painlessConstuctorCache.computeIfAbsent(
        painlessConstructor = painlessConstructorCache.computeIfAbsent(
            new PainlessConstructorCacheKey(targetClass, typeParameters),
            key -> new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType)
        );

@ -439,7 +496,7 @@ public final class PainlessLookupBuilder {
        Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);

        if (typeParameter == null) {
            throw new IllegalArgumentException("parameter type [" + canonicalTypeNameParameter + "] not found for method " +
            throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found for method " +
                "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
        }

@ -449,7 +506,7 @@ public final class PainlessLookupBuilder {
        Class<?> returnType = canonicalTypeNameToType(returnCanonicalTypeName);

        if (returnType == null) {
            throw new IllegalArgumentException("parameter type [" + returnCanonicalTypeName + "] not found for method " +
            throw new IllegalArgumentException("return type [" + returnCanonicalTypeName + "] not found for method " +
                "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
        }

@ -548,7 +605,7 @@ public final class PainlessLookupBuilder {
        MethodType methodType = methodHandle.type();

        painlessMethod = painlessMethodCache.computeIfAbsent(
            new PainlessMethodCacheKey(targetClass, methodName, typeParameters),
            new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters),
            key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType));

        painlessClassBuilder.staticMethods.put(painlessMethodKey, painlessMethod);

@ -588,7 +645,7 @@ public final class PainlessLookupBuilder {
        MethodType methodType = methodHandle.type();

        painlessMethod = painlessMethodCache.computeIfAbsent(
            new PainlessMethodCacheKey(targetClass, methodName, typeParameters),
            new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters),
            key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType));

        painlessClassBuilder.methods.put(painlessMethodKey, painlessMethod);

@ -731,6 +788,183 @@ public final class PainlessLookupBuilder {
        }
    }

    public void addPainlessBinding(ClassLoader classLoader, String targetJavaClassName,
|
||||
String methodName, String returnCanonicalTypeName, List<String> canonicalTypeNameParameters) {
|
||||
|
||||
Objects.requireNonNull(classLoader);
|
||||
Objects.requireNonNull(targetJavaClassName);
|
||||
Objects.requireNonNull(methodName);
|
||||
Objects.requireNonNull(returnCanonicalTypeName);
|
||||
Objects.requireNonNull(canonicalTypeNameParameters);
|
||||
|
||||
Class<?> targetClass;
|
||||
|
||||
try {
|
||||
targetClass = Class.forName(targetJavaClassName, true, classLoader);
|
||||
} catch (ClassNotFoundException cnfe) {
|
||||
throw new IllegalArgumentException("class [" + targetJavaClassName + "] not found", cnfe);
|
||||
}
|
||||
|
||||
String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);
|
||||
List<Class<?>> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size());
|
||||
|
||||
for (String canonicalTypeNameParameter : canonicalTypeNameParameters) {
|
||||
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);
|
||||
|
||||
if (typeParameter == null) {
|
||||
throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found for binding " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
|
||||
}
|
||||
|
||||
typeParameters.add(typeParameter);
|
||||
}
|
||||
|
||||
Class<?> returnType = canonicalTypeNameToType(returnCanonicalTypeName);
|
||||
|
||||
if (returnType == null) {
|
||||
throw new IllegalArgumentException("return type [" + returnCanonicalTypeName + "] not found for binding " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
|
||||
}
|
||||
|
||||
addPainlessBinding(targetClass, methodName, returnType, typeParameters);
|
||||
}
|
||||
|
||||
public void addPainlessBinding(Class<?> targetClass, String methodName, Class<?> returnType, List<Class<?>> typeParameters) {
|
||||
|
||||
Objects.requireNonNull(targetClass);
|
||||
Objects.requireNonNull(methodName);
|
||||
Objects.requireNonNull(returnType);
|
||||
Objects.requireNonNull(typeParameters);
|
||||
|
||||
if (targetClass == def.class) {
|
||||
throw new IllegalArgumentException("cannot add binding as reserved class [" + DEF_CLASS_NAME + "]");
|
||||
}
|
||||
|
||||
String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);
|
||||
|
||||
Constructor<?>[] javaConstructors = targetClass.getConstructors();
|
||||
Constructor<?> javaConstructor = null;
|
||||
|
||||
for (Constructor<?> eachJavaConstructor : javaConstructors) {
|
||||
if (eachJavaConstructor.getDeclaringClass() == targetClass) {
|
||||
if (javaConstructor != null) {
|
||||
throw new IllegalArgumentException("binding [" + targetCanonicalClassName + "] cannot have multiple constructors");
|
||||
}
|
||||
|
||||
javaConstructor = eachJavaConstructor;
|
||||
}
|
||||
}
|
||||
|
||||
if (javaConstructor == null) {
|
||||
throw new IllegalArgumentException("binding [" + targetCanonicalClassName + "] must have exactly one constructor");
|
||||
}
|
||||
|
||||
int constructorTypeParametersSize = javaConstructor.getParameterCount();
|
||||
|
||||
for (int typeParameterIndex = 0; typeParameterIndex < constructorTypeParametersSize; ++typeParameterIndex) {
|
||||
Class<?> typeParameter = typeParameters.get(typeParameterIndex);
|
||||
|
||||
if (isValidType(typeParameter) == false) {
|
||||
throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " +
|
||||
"for binding [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
|
||||
Class<?> javaTypeParameter = javaConstructor.getParameterTypes()[typeParameterIndex];
|
||||
|
||||
if (isValidType(javaTypeParameter) == false) {
|
||||
throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " +
|
||||
"for binding [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
|
||||
if (javaTypeParameter != typeToJavaType(typeParameter)) {
|
||||
throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(javaTypeParameter) + "] " +
|
||||
"does not match the specified type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " +
|
||||
"for binding [[" + targetClass.getCanonicalName() + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
}
|
||||
|
||||
if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) {
|
||||
throw new IllegalArgumentException(
|
||||
"invalid method name [" + methodName + "] for binding [" + targetCanonicalClassName + "].");
|
||||
}
|
||||
|
||||
Method[] javaMethods = targetClass.getMethods();
|
||||
Method javaMethod = null;
|
||||
|
||||
for (Method eachJavaMethod : javaMethods) {
|
||||
if (eachJavaMethod.getDeclaringClass() == targetClass) {
|
||||
if (javaMethod != null) {
|
||||
throw new IllegalArgumentException("binding [" + targetCanonicalClassName + "] cannot have multiple methods");
|
||||
}
|
||||
|
||||
javaMethod = eachJavaMethod;
|
||||
}
|
||||
}
|
||||
|
||||
if (javaMethod == null) {
|
||||
throw new IllegalArgumentException("binding [" + targetCanonicalClassName + "] must have exactly one method");
|
||||
}
|
||||
|
||||
int methodTypeParametersSize = javaMethod.getParameterCount();
|
||||
|
||||
for (int typeParameterIndex = 0; typeParameterIndex < methodTypeParametersSize; ++typeParameterIndex) {
|
||||
Class<?> typeParameter = typeParameters.get(constructorTypeParametersSize + typeParameterIndex);
|
||||
|
||||
if (isValidType(typeParameter) == false) {
|
||||
throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " +
|
||||
"for binding [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
|
||||
Class<?> javaTypeParameter = javaMethod.getParameterTypes()[typeParameterIndex];
|
||||
|
||||
if (isValidType(javaTypeParameter) == false) {
|
||||
throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " +
|
||||
"for binding [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
|
||||
if (javaTypeParameter != typeToJavaType(typeParameter)) {
|
||||
throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(javaTypeParameter) + "] " +
|
||||
"does not match the specified type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " +
|
||||
"for binding [[" + targetClass.getCanonicalName() + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
}
|
||||
|
||||
if (javaMethod.getReturnType() != typeToJavaType(returnType)) {
|
||||
throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " +
|
||||
"does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " +
|
||||
"for binding [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " +
|
||||
typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
|
||||
String painlessMethodKey = buildPainlessMethodKey(methodName, constructorTypeParametersSize + methodTypeParametersSize);
|
||||
PainlessBinding painlessBinding = painlessMethodKeysToPainlessBindings.get(painlessMethodKey);
|
||||
|
||||
if (painlessBinding == null) {
|
||||
Constructor<?> finalJavaConstructor = javaConstructor;
|
||||
Method finalJavaMethod = javaMethod;
|
||||
|
||||
painlessBinding = painlessBindingCache.computeIfAbsent(
|
||||
new PainlessBindingCacheKey(targetClass, methodName, returnType, typeParameters),
|
||||
key -> new PainlessBinding(finalJavaConstructor, finalJavaMethod, returnType, typeParameters));
|
||||
|
||||
painlessMethodKeysToPainlessBindings.put(painlessMethodKey, painlessBinding);
|
||||
} else if (painlessBinding.javaConstructor.equals(javaConstructor) == false ||
|
||||
painlessBinding.javaMethod.equals(javaMethod) == false ||
|
||||
painlessBinding.returnType != returnType ||
|
||||
painlessBinding.typeParameters.equals(typeParameters) == false) {
|
||||
throw new IllegalArgumentException("cannot have bindings " +
|
||||
"[[" + targetCanonicalClassName + "], " +
|
||||
"[" + methodName + "], " +
|
||||
"[" + typeToCanonicalTypeName(returnType) + "], " +
|
||||
typesToCanonicalTypeNames(typeParameters) + "] and " +
|
||||
"[[" + targetCanonicalClassName + "], " +
|
||||
"[" + methodName + "], " +
|
||||
"[" + typeToCanonicalTypeName(painlessBinding.returnType) + "], " +
|
||||
typesToCanonicalTypeNames(painlessBinding.typeParameters) + "] and " +
|
||||
"with the same name and arity but different constructors or methods");
|
||||
}
|
||||
}
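The first overload above resolves canonical names to classes and delegates to the second, which enforces the binding contract: exactly one public constructor and exactly one public method declared on the class, with the constructor's parameters occupying the front of the combined type-parameter list. A minimal sketch of a programmatic registration, using a hypothetical ExampleBinding class (the class and method names here are illustrative, not part of this commit):

    import java.util.Arrays;

    // Hypothetical binding: the constructor captures state once; the single
    // declared method is what the script call dispatches to on every execution.
    public class ExampleBinding {
        private final int state;

        public ExampleBinding(int state) {
            this.state = state;
        }

        public int addToState(int value) {
            return state + value;
        }
    }

    // Constructor parameters come first in the type-parameter list, followed
    // by the method parameters: (int) + (int) gives an arity-2 script call.
    PainlessLookupBuilder painlessLookupBuilder = new PainlessLookupBuilder();
    painlessLookupBuilder.addPainlessBinding(ExampleBinding.class, "addToState",
            int.class, Arrays.asList(int.class, int.class));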
    public PainlessLookup build() {
        copyPainlessClassMembers();
        cacheRuntimeHandles();

@@ -742,7 +976,7 @@ public final class PainlessLookupBuilder {
            classesToPainlessClasses.put(painlessClassBuilderEntry.getKey(), painlessClassBuilderEntry.getValue().build());
        }

-       return new PainlessLookup(canonicalClassNamesToClasses, classesToPainlessClasses);
+       return new PainlessLookup(canonicalClassNamesToClasses, classesToPainlessClasses, painlessMethodKeysToPainlessBindings);
    }

    private void copyPainlessClassMembers() {

@@ -26,6 +26,7 @@ import java.util.Collections;
import java.util.List;

public class PainlessMethod {

    public final Method javaMethod;
+   public final Class<?> targetClass;
    public final Class<?> returnType;
@@ -24,8 +24,12 @@ import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.LocalMethod;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
+import org.elasticsearch.painless.lookup.PainlessBinding;
+import org.objectweb.asm.Label;
+import org.objectweb.asm.Type;
import org.objectweb.asm.commons.Method;

+import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;

@@ -41,6 +45,7 @@ public final class ECallLocal extends AExpression {
    private final List<AExpression> arguments;

    private LocalMethod method = null;
+   private PainlessBinding binding = null;

    public ECallLocal(Location location, String name, List<AExpression> arguments) {
        super(location);

@@ -60,32 +65,71 @@ public final class ECallLocal extends AExpression {
    void analyze(Locals locals) {
        method = locals.getMethod(name, arguments.size());

        if (method == null) {
-           throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments."));
+           binding = locals.getPainlessLookup().lookupPainlessBinding(name, arguments.size());
+
+           if (binding == null) {
+               throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments."));
+           }
        }

+       List<Class<?>> typeParameters = new ArrayList<>(method == null ? binding.typeParameters : method.typeParameters);
+
        for (int argument = 0; argument < arguments.size(); ++argument) {
            AExpression expression = arguments.get(argument);

-           expression.expected = method.typeParameters.get(argument);
+           expression.expected = typeParameters.get(argument);
            expression.internal = true;
            expression.analyze(locals);
            arguments.set(argument, expression.cast(locals));
        }

        statement = true;
-       actual = method.returnType;
+       actual = method == null ? binding.returnType : method.returnType;
    }

    @Override
    void write(MethodWriter writer, Globals globals) {
        writer.writeDebugInfo(location);

-       for (AExpression argument : arguments) {
-           argument.write(writer, globals);
-       }
-
-       writer.invokeStatic(CLASS_TYPE, new Method(method.name, method.methodType.toMethodDescriptorString()));
+       if (method == null) {
+           String name = globals.addBinding(binding.javaConstructor.getDeclaringClass());
+           Type type = Type.getType(binding.javaConstructor.getDeclaringClass());
+           int javaConstructorParameterCount = binding.javaConstructor.getParameterCount();
+
+           Label nonNull = new Label();
+
+           writer.loadThis();
+           writer.getField(CLASS_TYPE, name, type);
+           writer.ifNonNull(nonNull);
+           writer.loadThis();
+           writer.newInstance(type);
+           writer.dup();
+
+           for (int argument = 0; argument < javaConstructorParameterCount; ++argument) {
+               arguments.get(argument).write(writer, globals);
+           }
+
+           writer.invokeConstructor(type, Method.getMethod(binding.javaConstructor));
+           writer.putField(CLASS_TYPE, name, type);
+
+           writer.mark(nonNull);
+           writer.loadThis();
+           writer.getField(CLASS_TYPE, name, type);
+
+           for (int argument = 0; argument < binding.javaMethod.getParameterCount(); ++argument) {
+               arguments.get(argument + javaConstructorParameterCount).write(writer, globals);
+           }
+
+           writer.invokeVirtual(type, Method.getMethod(binding.javaMethod));
+       } else {
+           for (AExpression argument : arguments) {
+               argument.write(writer, globals);
+           }
+
+           writer.invokeStatic(CLASS_TYPE, new Method(method.name, method.methodType.toMethodDescriptorString()));
+       }
    }
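For a binding call, the writer above emits a lazily initialized instance field per call site: the leading arguments are consumed by the constructor exactly once, and every later execution jumps past the construction and reuses the stored instance. In rough Java terms, the generated bytecode behaves like the following sketch (the field name is illustrative; the real one comes from globals.addBinding):

    // Conceptual equivalent of the emitted bytecode for a binding whose
    // constructor takes the first two arguments and whose method takes the rest.
    private BindingTest binding$0; // private field emitted by SSource (see below)

    int callSite(int a, int b, int c, double d) {
        if (binding$0 == null) {
            binding$0 = new BindingTest(a, b); // constructor arguments, used once
        }
        return binding$0.testAddWithState(c, d); // method arguments, every call
    }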
    @Override

@@ -359,6 +359,13 @@ public final class SSource extends AStatement {
            clinit.endMethod();
        }

+       // Write binding variables
+       for (Map.Entry<String, Class<?>> binding : globals.getBindings().entrySet()) {
+           String name = binding.getKey();
+           String descriptor = Type.getType(binding.getValue()).getDescriptor();
+           visitor.visitField(Opcodes.ACC_PRIVATE, name, descriptor, null, null).visitEnd();
+       }
+
        // Write any needsVarName methods for used variables
        for (org.objectweb.asm.commons.Method needsMethod : scriptClassInfo.getNeedsMethods()) {
            String name = needsMethod.getName();
@@ -132,24 +132,6 @@ class org.elasticsearch.index.mapper.IpFieldMapper$IpFieldType$IpScriptDocValues
  List getValues()
}

-# for testing.
-# currently FeatureTest exposes overloaded constructor, field load store, and overloaded static methods
-class org.elasticsearch.painless.FeatureTest no_import {
-  int z
-  ()
-  (int,int)
-  int getX()
-  int getY()
-  void setX(int)
-  void setY(int)
-  boolean overloadedStatic()
-  boolean overloadedStatic(boolean)
-  Object twoFunctionsOfX(Function,Function)
-  void listInput(List)
-  int org.elasticsearch.painless.FeatureTestAugmentation getTotal()
-  int org.elasticsearch.painless.FeatureTestAugmentation addToTotal(int)
-}
-
class org.elasticsearch.search.lookup.FieldLookup {
  def getValue()
  List getValues()

@@ -174,4 +156,26 @@ class org.elasticsearch.index.similarity.ScriptedSimilarity$Term {
class org.elasticsearch.index.similarity.ScriptedSimilarity$Doc {
  int getLength()
  float getFreq()
}
+
+# for testing
+class org.elasticsearch.painless.FeatureTest no_import {
+  int z
+  ()
+  (int,int)
+  int getX()
+  int getY()
+  void setX(int)
+  void setY(int)
+  boolean overloadedStatic()
+  boolean overloadedStatic(boolean)
+  Object twoFunctionsOfX(Function,Function)
+  void listInput(List)
+  int org.elasticsearch.painless.FeatureTestAugmentation getTotal()
+  int org.elasticsearch.painless.FeatureTestAugmentation addToTotal(int)
+}
+
+# for testing
+static {
+  int testAddWithState(int, int, int, double) bound_to org.elasticsearch.painless.BindingTest
+}
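The bound_to clause maps the script-visible signature onto a Java class: the first arguments feed the class's single constructor, the remaining ones feed its single method. A sketch of a BindingTest shape consistent with this signature and with the arithmetic asserted in BindingsTests below (inferred for illustration; the actual class is not part of this excerpt):

    package org.elasticsearch.painless;

    public class BindingTest {
        public int state;

        // Captures the first two int arguments of
        // testAddWithState(int, int, int, double) exactly once.
        public BindingTest(int state0, int state1) {
            this.state = state0 + state1;
        }

        // Receives the trailing (int, double) arguments on every call.
        public int testAddWithState(int istateless, double dstateless) {
            return istateless + state + (int) dstateless;
        }
    }

With this shape, exec("testAddWithState(4, 5, 6, 0.0)") constructs the binding with state 4 + 5 = 9 and returns 6 + 9 + 0 = 15, matching testBasicBinding.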
@@ -0,0 +1,64 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.painless;

import org.elasticsearch.script.ExecutableScript;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class BindingsTests extends ScriptTestCase {

    public void testBasicBinding() {
        assertEquals(15, exec("testAddWithState(4, 5, 6, 0.0)"));
    }

    public void testRepeatedBinding() {
        String script = "testAddWithState(4, 5, params.test, 0.0)";
        Map<String, Object> params = new HashMap<>();
        ExecutableScript.Factory factory = scriptEngine.compile(null, script, ExecutableScript.CONTEXT, Collections.emptyMap());
        ExecutableScript executableScript = factory.newInstance(params);

        executableScript.setNextVar("test", 5);
        assertEquals(14, executableScript.run());

        executableScript.setNextVar("test", 4);
        assertEquals(13, executableScript.run());

        executableScript.setNextVar("test", 7);
        assertEquals(16, executableScript.run());
    }

    public void testBoundBinding() {
        String script = "testAddWithState(4, params.bound, params.test, 0.0)";
        Map<String, Object> params = new HashMap<>();
        ExecutableScript.Factory factory = scriptEngine.compile(null, script, ExecutableScript.CONTEXT, Collections.emptyMap());
        ExecutableScript executableScript = factory.newInstance(params);

        executableScript.setNextVar("test", 5);
        executableScript.setNextVar("bound", 1);
        assertEquals(10, executableScript.run());

        executableScript.setNextVar("test", 4);
        executableScript.setNextVar("bound", 2);
        assertEquals(9, executableScript.run());
    }
}
@@ -83,7 +83,6 @@ thirdPartyAudit.excludes = [
    'io.netty.internal.tcnative.SSLContext',

    // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
-   'org.bouncycastle.asn1.x500.X500Name',
    'org.bouncycastle.cert.X509v3CertificateBuilder',
    'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
    'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',

@@ -163,3 +162,11 @@ thirdPartyAudit.excludes = [
    'org.conscrypt.Conscrypt',
    'org.conscrypt.HandshakeListener'
]
+
+if (project.inFipsJvm == false) {
+   // BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in
+   // a FIPS JVM with BouncyCastleFIPS Provider
+   thirdPartyAudit.excludes += [
+       'org.bouncycastle.asn1.x500.X500Name'
+   ]
+}
@@ -19,39 +19,33 @@

package org.elasticsearch.discovery.file;

+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.zen.UnicastHostsProvider;
-import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.DiscoveryPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.transport.TransportService;

-import java.nio.file.Path;
import java.util.Collections;
import java.util.Map;
import java.util.function.Supplier;

-/**
- * Plugin for providing file-based unicast hosts discovery. The list of unicast hosts
- * is obtained by reading the {@link FileBasedUnicastHostsProvider#UNICAST_HOSTS_FILE} in
- * the {@link Environment#configFile()}/discovery-file directory.
- */
public class FileBasedDiscoveryPlugin extends Plugin implements DiscoveryPlugin {

-   private final Settings settings;
-   private final Path configPath;
+   private final DeprecationLogger deprecationLogger;
+   static final String DEPRECATION_MESSAGE
+       = "File-based discovery is now built into Elasticsearch and does not require the discovery-file plugin";

-   public FileBasedDiscoveryPlugin(Settings settings, Path configPath) {
-       this.settings = settings;
-       this.configPath = configPath;
+   public FileBasedDiscoveryPlugin(Settings settings) {
+       deprecationLogger = new DeprecationLogger(Loggers.getLogger(this.getClass(), settings));
    }

    @Override
    public Map<String, Supplier<UnicastHostsProvider>> getZenHostsProviders(TransportService transportService,
                                                                            NetworkService networkService) {
-       return Collections.singletonMap(
-           "file",
-           () -> new FileBasedUnicastHostsProvider(new Environment(settings, configPath)));
+       deprecationLogger.deprecated(DEPRECATION_MESSAGE);
+       return Collections.emptyMap();
    }
}
@@ -1,83 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.file;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.zen.UnicastHostsProvider;
import org.elasticsearch.env.Environment;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * An implementation of {@link UnicastHostsProvider} that reads hosts/ports
 * from {@link #UNICAST_HOSTS_FILE}.
 *
 * Each unicast host/port that is part of the discovery process must be listed on
 * a separate line. If the port is left off an entry, a default port of 9300 is
 * assumed. An example unicast hosts file could read:
 *
 * 67.81.244.10
 * 67.81.244.11:9305
 * 67.81.244.15:9400
 */
class FileBasedUnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider {

    static final String UNICAST_HOSTS_FILE = "unicast_hosts.txt";

    private final Path unicastHostsFilePath;

    FileBasedUnicastHostsProvider(Environment environment) {
        super(environment.settings());
        this.unicastHostsFilePath = environment.configFile().resolve("discovery-file").resolve(UNICAST_HOSTS_FILE);
    }

    @Override
    public List<TransportAddress> buildDynamicHosts(HostsResolver hostsResolver) {
        List<String> hostsList;
        try (Stream<String> lines = Files.lines(unicastHostsFilePath)) {
            hostsList = lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments
                .collect(Collectors.toList());
        } catch (FileNotFoundException | NoSuchFileException e) {
            logger.warn((Supplier<?>) () -> new ParameterizedMessage("[discovery-file] Failed to find unicast hosts file [{}]",
                unicastHostsFilePath), e);
            hostsList = Collections.emptyList();
        } catch (IOException e) {
            logger.warn((Supplier<?>) () -> new ParameterizedMessage("[discovery-file] Error reading unicast hosts file [{}]",
                unicastHostsFilePath), e);
            hostsList = Collections.emptyList();
        }

        final List<TransportAddress> dynamicHosts = hostsResolver.resolveHosts(hostsList, 1);
        logger.debug("[discovery-file] Using dynamic discovery nodes {}", dynamicHosts);
        return dynamicHosts;
    }

}
@@ -0,0 +1,32 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.file;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;

import static org.elasticsearch.discovery.file.FileBasedDiscoveryPlugin.DEPRECATION_MESSAGE;

public class FileBasedDiscoveryPluginDeprecationTests extends ESTestCase {
    public void testDeprecationWarning() {
        new FileBasedDiscoveryPlugin(Settings.EMPTY).getZenHostsProviders(null, null);
        assertWarnings(DEPRECATION_MESSAGE);
    }
}
@@ -2141,3 +2141,9 @@ if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
        'javax.xml.bind.Unmarshaller'
    ]
}
+
+if (project.inFipsJvm) {
+   // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit,
+   // rather than provide a long list of exclusions, disable the check on FIPS.
+   thirdPartyAudit.enabled = false
+}
@@ -62,7 +62,6 @@ thirdPartyAudit.excludes = [
    'io.netty.internal.tcnative.SSLContext',

    // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
-   'org.bouncycastle.asn1.x500.X500Name',
    'org.bouncycastle.cert.X509v3CertificateBuilder',
    'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
    'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',

@@ -141,4 +140,11 @@ thirdPartyAudit.excludes = [
    'org.conscrypt.BufferAllocator',
    'org.conscrypt.Conscrypt',
    'org.conscrypt.HandshakeListener'
]
+
+if (project.inFipsJvm == false) {
+   // BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in
+   // a FIPS JVM with BouncyCastleFIPS Provider
+   thirdPartyAudit.excludes += [
+       'org.bouncycastle.asn1.x500.X500Name'
+   ]
+}
@@ -131,9 +131,7 @@ import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction;
import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction;
import org.elasticsearch.action.admin.indices.shrink.ResizeAction;
-import org.elasticsearch.action.admin.indices.shrink.ShrinkAction;
import org.elasticsearch.action.admin.indices.shrink.TransportResizeAction;
-import org.elasticsearch.action.admin.indices.shrink.TransportShrinkAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction;

@@ -446,7 +444,6 @@ public class ActionModule extends AbstractModule {
        actions.register(IndicesSegmentsAction.INSTANCE, TransportIndicesSegmentsAction.class);
        actions.register(IndicesShardStoresAction.INSTANCE, TransportIndicesShardStoresAction.class);
        actions.register(CreateIndexAction.INSTANCE, TransportCreateIndexAction.class);
-       actions.register(ShrinkAction.INSTANCE, TransportShrinkAction.class);
        actions.register(ResizeAction.INSTANCE, TransportResizeAction.class);
        actions.register(RolloverAction.INSTANCE, TransportRolloverAction.class);
        actions.register(DeleteIndexAction.INSTANCE, TransportDeleteIndexAction.class);
@@ -55,8 +55,6 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ

    private final Set<Alias> aliases = new HashSet<>();

-   private final Map<String, IndexMetaData.Custom> customs = new HashMap<>();
-
    private final Set<ClusterBlock> blocks = new HashSet<>();

    private ActiveShardCount waitForActiveShards = ActiveShardCount.DEFAULT;

@@ -83,11 +81,6 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ
        return this;
    }

-   public CreateIndexClusterStateUpdateRequest customs(Map<String, IndexMetaData.Custom> customs) {
-       this.customs.putAll(customs);
-       return this;
-   }
-
    public CreateIndexClusterStateUpdateRequest blocks(Set<ClusterBlock> blocks) {
        this.blocks.addAll(blocks);
        return this;

@@ -146,10 +139,6 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ
        return aliases;
    }

-   public Map<String, IndexMetaData.Custom> customs() {
-       return customs;
-   }
-
    public Set<ClusterBlock> blocks() {
        return blocks;
    }
@@ -29,7 +29,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;

@@ -58,9 +57,9 @@ import java.util.Objects;
import java.util.Set;

import static org.elasticsearch.action.ValidateActions.addValidationError;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;

/**
 * A request to create an index. Best created with {@link org.elasticsearch.client.Requests#createIndexRequest(String)}.

@@ -87,8 +86,6 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>

    private final Set<Alias> aliases = new HashSet<>();

-   private final Map<String, IndexMetaData.Custom> customs = new HashMap<>();
-
    private ActiveShardCount waitForActiveShards = ActiveShardCount.DEFAULT;

    public CreateIndexRequest() {

@@ -388,18 +385,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
            } else if (ALIASES.match(name, deprecationHandler)) {
                aliases((Map<String, Object>) entry.getValue());
            } else {
-               // maybe custom?
-               IndexMetaData.Custom proto = IndexMetaData.lookupPrototype(name);
-               if (proto != null) {
-                   try {
-                       customs.put(name, proto.fromMap((Map<String, Object>) entry.getValue()));
-                   } catch (IOException e) {
-                       throw new ElasticsearchParseException("failed to parse custom metadata for [{}]", name);
-                   }
-               } else {
-                   // found a key which is neither custom defined nor one of the supported ones
-                   throw new ElasticsearchParseException("unknown key [{}] for create index", name);
-               }
+               throw new ElasticsearchParseException("unknown key [{}] for create index", name);
            }
        }
        return this;

@@ -413,18 +399,6 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
        return this.aliases;
    }

-   /**
-    * Adds custom metadata to the index to be created.
-    */
-   public CreateIndexRequest custom(IndexMetaData.Custom custom) {
-       customs.put(custom.type(), custom);
-       return this;
-   }
-
-   public Map<String, IndexMetaData.Custom> customs() {
-       return this.customs;
-   }
-
    public ActiveShardCount waitForActiveShards() {
        return waitForActiveShards;
    }

@@ -474,11 +448,13 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
            }
            mappings.put(type, source);
        }
-       int customSize = in.readVInt();
-       for (int i = 0; i < customSize; i++) {
-           String type = in.readString();
-           IndexMetaData.Custom customIndexMetaData = IndexMetaData.lookupPrototypeSafe(type).readFrom(in);
-           customs.put(type, customIndexMetaData);
+       if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
+           // This used to be the size of custom metadata classes
+           int customSize = in.readVInt();
+           assert customSize == 0 : "unexpected custom metadata when none is supported";
+           if (customSize > 0) {
+               throw new IllegalStateException("unexpected custom metadata when none is supported");
+           }
        }
        int aliasesSize = in.readVInt();
        for (int i = 0; i < aliasesSize; i++) {

@@ -501,10 +477,9 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
            out.writeString(entry.getKey());
            out.writeString(entry.getValue());
        }
-       out.writeVInt(customs.size());
-       for (Map.Entry<String, IndexMetaData.Custom> entry : customs.entrySet()) {
-           out.writeString(entry.getKey());
-           entry.getValue().writeTo(out);
+       if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
+           // Size of custom index metadata, which is removed
+           out.writeVInt(0);
        }
        out.writeVInt(aliases.size());
        for (Alias alias : aliases) {

@@ -542,10 +517,6 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
            alias.toXContent(builder, params);
        }
        builder.endObject();

-       for (Map.Entry<String, IndexMetaData.Custom> entry : customs.entrySet()) {
-           builder.field(entry.getKey(), entry.getValue(), params);
-       }
        return builder;
    }
}
@@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;

@@ -224,14 +223,6 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder<Create
        return this;
    }

-   /**
-    * Adds custom metadata to the index to be created.
-    */
-   public CreateIndexRequestBuilder addCustom(IndexMetaData.Custom custom) {
-       request.custom(custom);
-       return this;
-   }
-
    /**
     * Sets the settings and mappings as a single source.
     */
@@ -75,7 +75,7 @@ public class TransportCreateIndexAction extends TransportMasterNodeAction<Create
        final CreateIndexClusterStateUpdateRequest updateRequest = new CreateIndexClusterStateUpdateRequest(request, cause, indexName, request.index())
                .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
                .settings(request.settings()).mappings(request.mappings())
-               .aliases(request.aliases()).customs(request.customs())
+               .aliases(request.aliases())
                .waitForActiveShards(request.waitForActiveShards());

        createIndexService.createIndex(updateRequest, ActionListener.wrap(response ->
@@ -185,7 +185,6 @@ public class TransportResizeAction extends TransportMasterNodeAction<ResizeReque
                .masterNodeTimeout(targetIndex.masterNodeTimeout())
                .settings(targetIndex.settings())
                .aliases(targetIndex.aliases())
-               .customs(targetIndex.customs())
                .waitForActiveShards(targetIndex.waitForActiveShards())
                .recoverFrom(metaData.getIndex())
                .resizeType(resizeRequest.getResizeType())
@@ -1,46 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.indices.shrink;

import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

/**
 * Main class to initiate shrinking an index into a new index
 * This class is only here for backwards compatibility. It will be replaced by
 * TransportResizeAction in 7.x once this is backported
 */
public class TransportShrinkAction extends TransportResizeAction {

    @Inject
    public TransportShrinkAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                 ThreadPool threadPool, MetaDataCreateIndexService createIndexService,
                                 ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Client client) {
        super(settings, ShrinkAction.NAME, transportService, clusterService, threadPool, createIndexService, actionFilters,
                indexNameExpressionResolver, client);
    }
}
@@ -27,7 +27,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeRequest;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

@@ -61,9 +60,9 @@ import java.util.Set;
import java.util.stream.Collectors;

import static org.elasticsearch.action.ValidateActions.addValidationError;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;

/**
 * A request to create an index template.

@@ -88,8 +87,6 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR

    private final Set<Alias> aliases = new HashSet<>();

-   private Map<String, IndexMetaData.Custom> customs = new HashMap<>();
-
    private Integer version;

    public PutIndexTemplateRequest() {

@@ -353,15 +350,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
            } else if (name.equals("aliases")) {
                aliases((Map<String, Object>) entry.getValue());
            } else {
-               // maybe custom?
-               IndexMetaData.Custom proto = IndexMetaData.lookupPrototype(name);
-               if (proto != null) {
-                   try {
-                       customs.put(name, proto.fromMap((Map<String, Object>) entry.getValue()));
-                   } catch (IOException e) {
-                       throw new ElasticsearchParseException("failed to parse custom metadata for [{}]", name);
-                   }
-               }
                throw new ElasticsearchParseException("unknown key [{}] in the template ", name);
            }
        }
        return this;

@@ -395,15 +384,6 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
        return source(XContentHelper.convertToMap(source, true, xContentType).v2());
    }

-   public PutIndexTemplateRequest custom(IndexMetaData.Custom custom) {
-       customs.put(custom.type(), custom);
-       return this;
-   }
-
-   public Map<String, IndexMetaData.Custom> customs() {
-       return this.customs;
-   }
-
    public Set<Alias> aliases() {
        return this.aliases;
    }

@@ -494,11 +474,13 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
            String mappingSource = in.readString();
            mappings.put(type, mappingSource);
        }
-       int customSize = in.readVInt();
-       for (int i = 0; i < customSize; i++) {
-           String type = in.readString();
-           IndexMetaData.Custom customIndexMetaData = IndexMetaData.lookupPrototypeSafe(type).readFrom(in);
-           customs.put(type, customIndexMetaData);
+       if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
+           // Used to be used for custom index metadata
+           int customSize = in.readVInt();
+           assert customSize == 0 : "expected not to have any custom metadata";
+           if (customSize > 0) {
+               throw new IllegalStateException("unexpected custom metadata when none is supported");
+           }
        }
        int aliasesSize = in.readVInt();
        for (int i = 0; i < aliasesSize; i++) {

@@ -525,10 +507,8 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
            out.writeString(entry.getKey());
            out.writeString(entry.getValue());
        }
-       out.writeVInt(customs.size());
-       for (Map.Entry<String, IndexMetaData.Custom> entry : customs.entrySet()) {
-           out.writeString(entry.getKey());
-           entry.getValue().writeTo(out);
+       if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
+           out.writeVInt(0);
        }
        out.writeVInt(aliases.size());
        for (Alias alias : aliases) {

@@ -565,10 +545,6 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
        }
        builder.endObject();

-       for (Map.Entry<String, IndexMetaData.Custom> entry : customs.entrySet()) {
-           builder.field(entry.getKey(), entry.getValue(), params);
-       }

        return builder;
    }
}
@@ -84,7 +84,6 @@ public class TransportPutIndexTemplateAction extends TransportMasterNodeAction<P
                .settings(templateSettingsBuilder.build())
                .mappings(request.mappings())
                .aliases(request.aliases())
-               .customs(request.customs())
                .create(request.create())
                .masterTimeout(request.masterNodeTimeout())
                .version(request.version()),
@@ -166,14 +166,14 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
    }

    /**
-    * Whether this field is indexed for search on all indices.
+    * Whether this field can be aggregated on all indices.
     */
    public boolean isAggregatable() {
        return isAggregatable;
    }

    /**
-    * Whether this field can be aggregated on all indices.
+    * Whether this field is indexed for search on all indices.
     */
    public boolean isSearchable() {
        return isSearchable;
@@ -111,7 +111,6 @@ public final class FieldCapabilitiesRequest extends ActionRequest implements Ind
    }

    /**
-    *
     * The list of indices to lookup
     */
    public FieldCapabilitiesRequest indices(String... indices) {
@@ -35,6 +35,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.stream.Collectors;

/**

@@ -56,15 +57,15 @@ public class FieldCapabilitiesResponse extends ActionResponse implements ToXCont

    private FieldCapabilitiesResponse(Map<String, Map<String, FieldCapabilities>> responseMap,
                                      List<FieldCapabilitiesIndexResponse> indexResponses) {
-       this.responseMap = responseMap;
-       this.indexResponses = indexResponses;
+       this.responseMap = Objects.requireNonNull(responseMap);
+       this.indexResponses = Objects.requireNonNull(indexResponses);
    }

    /**
     * Used for serialization
     */
    FieldCapabilitiesResponse() {
-       this.responseMap = Collections.emptyMap();
+       this(Collections.emptyMap(), Collections.emptyList());
    }

    /**

@@ -81,6 +82,7 @@ public class FieldCapabilitiesResponse extends ActionResponse implements ToXCont
    List<FieldCapabilitiesIndexResponse> getIndexResponses() {
        return indexResponses;
    }

    /**
     *
     * Get the field capabilities per type for the provided {@code field}.
@@ -90,7 +90,7 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction<Fie
            }
        };
        if (totalNumRequest == 0) {
-           listener.onResponse(new FieldCapabilitiesResponse());
+           listener.onResponse(new FieldCapabilitiesResponse(Collections.emptyMap()));
        } else {
            ActionListener<FieldCapabilitiesIndexResponse> innerListener = new ActionListener<FieldCapabilitiesIndexResponse>() {
                @Override
@@ -171,9 +171,11 @@ public class SimulatePipelineRequest extends ActionRequest implements ToXContent
        return new Parsed(pipeline, ingestDocumentList, verbose);
    }

-   static Parsed parse(Map<String, Object> config, boolean verbose, IngestService pipelineStore) throws Exception {
+   static Parsed parse(Map<String, Object> config, boolean verbose, IngestService ingestService) throws Exception {
        Map<String, Object> pipelineConfig = ConfigurationUtils.readMap(null, null, config, Fields.PIPELINE);
-       Pipeline pipeline = Pipeline.create(SIMULATED_PIPELINE_ID, pipelineConfig, pipelineStore.getProcessorFactories());
+       Pipeline pipeline = Pipeline.create(
+           SIMULATED_PIPELINE_ID, pipelineConfig, ingestService.getProcessorFactories(), ingestService.getScriptService()
+       );
        List<IngestDocument> ingestDocumentList = parseDocs(config);
        return new Parsed(pipeline, ingestDocumentList, verbose);
    }
@@ -455,7 +455,7 @@ public interface Client extends ElasticsearchClient, Releasable {
    /**
     * Builder for the field capabilities request.
     */
-   FieldCapabilitiesRequestBuilder prepareFieldCaps();
+   FieldCapabilitiesRequestBuilder prepareFieldCaps(String... indices);

    /**
     * An action that returns the field capabilities from the provided request
@@ -651,8 +651,8 @@ public abstract class AbstractClient extends AbstractComponent implements Client
    }

    @Override
-   public FieldCapabilitiesRequestBuilder prepareFieldCaps() {
-       return new FieldCapabilitiesRequestBuilder(this, FieldCapabilitiesAction.INSTANCE);
+   public FieldCapabilitiesRequestBuilder prepareFieldCaps(String... indices) {
+       return new FieldCapabilitiesRequestBuilder(this, FieldCapabilitiesAction.INSTANCE, indices);
    }
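Callers now pass the target indices when the builder is created instead of setting them on the request afterwards. A usage sketch (index and field names are illustrative, and setFields is assumed from the existing builder API):

    FieldCapabilitiesResponse response = client.prepareFieldCaps("index-1", "index-2")
            .setFields("timestamp")
            .get();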
    static class Admin implements AdminClient {
@@ -0,0 +1,188 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.metadata;

import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.Diffable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * This is a {@code Map<String, String>} that implements AbstractDiffable so it
 * can be used for cluster state purposes
 */
public class DiffableStringMap extends AbstractMap<String, String> implements Diffable<DiffableStringMap> {

    private final Map<String, String> innerMap;

    DiffableStringMap(final Map<String, String> map) {
        this.innerMap = map;
    }

    @SuppressWarnings("unchecked")
    DiffableStringMap(final StreamInput in) throws IOException {
        this.innerMap = (Map<String, String>) (Map) in.readMap();
    }

    @Override
    public String put(String key, String value) {
        return innerMap.put(key, value);
    }

    @Override
    public Set<Entry<String, String>> entrySet() {
        return innerMap.entrySet();
    }

    @Override
    @SuppressWarnings("unchecked")
    public void writeTo(StreamOutput out) throws IOException {
        out.writeMap((Map<String, Object>) (Map) innerMap);
    }

    @Override
    public Diff<DiffableStringMap> diff(DiffableStringMap previousState) {
        return new DiffableStringMapDiff(previousState, this);
    }

    public static Diff<DiffableStringMap> readDiffFrom(StreamInput in) throws IOException {
        return new DiffableStringMapDiff(in);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (obj instanceof DiffableStringMap) {
            DiffableStringMap other = (DiffableStringMap) obj;
            return innerMap.equals(other.innerMap);
        } else if (obj instanceof Map) {
            Map other = (Map) obj;
            return innerMap.equals(other);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return innerMap.hashCode();
    }

    @Override
    public String toString() {
        return "DiffableStringMap[" + innerMap.toString() + "]";
    }

    /**
     * Represents differences between two DiffableStringMaps.
     */
    public static class DiffableStringMapDiff implements Diff<DiffableStringMap> {

        private final List<String> deletes;
        private final Map<String, String> upserts; // diffs also become upserts

        private DiffableStringMapDiff(DiffableStringMap before, DiffableStringMap after) {
            final List<String> tempDeletes = new ArrayList<>();
            final Map<String, String> tempUpserts = new HashMap<>();
            for (String key : before.keySet()) {
                if (after.containsKey(key) == false) {
                    tempDeletes.add(key);
                }
            }

            for (Map.Entry<String, String> partIter : after.entrySet()) {
                String beforePart = before.get(partIter.getKey());
                if (beforePart == null) {
                    tempUpserts.put(partIter.getKey(), partIter.getValue());
                } else if (partIter.getValue().equals(beforePart) == false) {
                    tempUpserts.put(partIter.getKey(), partIter.getValue());
                }
            }
            deletes = tempDeletes;
            upserts = tempUpserts;
        }

        private DiffableStringMapDiff(StreamInput in) throws IOException {
            deletes = new ArrayList<>();
            upserts = new HashMap<>();
            int deletesCount = in.readVInt();
            for (int i = 0; i < deletesCount; i++) {
                deletes.add(in.readString());
            }
            int upsertsCount = in.readVInt();
            for (int i = 0; i < upsertsCount; i++) {
                String key = in.readString();
                String newValue = in.readString();
                upserts.put(key, newValue);
            }
        }

        public List<String> getDeletes() {
            return deletes;
        }

        public Map<String, Diff<String>> getDiffs() {
            return Collections.emptyMap();
        }

        public Map<String, String> getUpserts() {
            return upserts;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(deletes.size());
            for (String delete : deletes) {
                out.writeString(delete);
            }
            out.writeVInt(upserts.size());
            for (Map.Entry<String, String> entry : upserts.entrySet()) {
                out.writeString(entry.getKey());
                out.writeString(entry.getValue());
            }
        }

        @Override
        public DiffableStringMap apply(DiffableStringMap part) {
            Map<String, String> builder = new HashMap<>(part.innerMap);
            List<String> deletes = getDeletes();
            for (String delete : deletes) {
                builder.remove(delete);
            }
            assert getDiffs().size() == 0 : "there should never be diffs for DiffableStringMap";

            for (Map.Entry<String, String> upsert : upserts.entrySet()) {
                builder.put(upsert.getKey(), upsert.getValue());
            }
            return new DiffableStringMap(builder);
        }
    }
}
|
|
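A minimal usage sketch of the diff/apply round trip (hypothetical, not part of this commit; it assumes the package-private constructors above are visible to the caller):

    // The diff between two maps carries only deletes and upserts;
    // applying it to the old map reproduces the new one.
    Map<String, String> old = new HashMap<>();
    old.put("a", "1");
    old.put("b", "2");
    Map<String, String> updated = new HashMap<>();
    updated.put("b", "3"); // changed value -> upsert
    updated.put("c", "4"); // new key -> upsert; the missing "a" -> delete
    DiffableStringMap before = new DiffableStringMap(old);
    DiffableStringMap after = new DiffableStringMap(updated);
    Diff<DiffableStringMap> diff = after.diff(before); // deletes: [a], upserts: {b=3, c=4}
    assert diff.apply(before).equals(after);
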
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.LongArrayList;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.elasticsearch.Assertions;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;

@@ -65,7 +64,6 @@ import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;

@@ -81,59 +79,6 @@ import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;

public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {

    /**
     * This class will be removed in v7.0
     */
    @Deprecated
    public interface Custom extends Diffable<Custom>, ToXContent {

        String type();

        Custom fromMap(Map<String, Object> map) throws IOException;

        Custom fromXContent(XContentParser parser) throws IOException;

        /**
         * Reads the {@link org.elasticsearch.cluster.Diff} from StreamInput
         */
        Diff<Custom> readDiffFrom(StreamInput in) throws IOException;

        /**
         * Reads an object of this type from the provided {@linkplain StreamInput}. The receiving instance remains unchanged.
         */
        Custom readFrom(StreamInput in) throws IOException;

        /**
         * Merges from this to another, with this being more important, i.e., if something exists in this and another,
         * this will prevail.
         */
        Custom mergeWith(Custom another);
    }

    public static Map<String, Custom> customPrototypes = new HashMap<>();

    /**
     * Register a custom index meta data factory. Make sure to call it from a static block.
     */
    public static void registerPrototype(String type, Custom proto) {
        customPrototypes.put(type, proto);
    }

    @Nullable
    public static <T extends Custom> T lookupPrototype(String type) {
        //noinspection unchecked
        return (T) customPrototypes.get(type);
    }

    public static <T extends Custom> T lookupPrototypeSafe(String type) {
        //noinspection unchecked
        T proto = (T) customPrototypes.get(type);
        if (proto == null) {
            throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "]");
        }
        return proto;
    }

    public static final ClusterBlock INDEX_READ_ONLY_BLOCK = new ClusterBlock(5, "index read-only (api)", false, false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE));
    public static final ClusterBlock INDEX_READ_BLOCK = new ClusterBlock(7, "index read (api)", false, false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.READ));
    public static final ClusterBlock INDEX_WRITE_BLOCK = new ClusterBlock(8, "index write (api)", false, false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE));

@@ -324,7 +269,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment

    private final ImmutableOpenMap<String, MappingMetaData> mappings;

    private final ImmutableOpenMap<String, Custom> customs;
    private final ImmutableOpenMap<String, DiffableStringMap> customData;

    private final ImmutableOpenIntMap<Set<String>> inSyncAllocationIds;

@@ -343,7 +288,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment

    private IndexMetaData(Index index, long version, long mappingVersion, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings,
                          ImmutableOpenMap<String, MappingMetaData> mappings, ImmutableOpenMap<String, AliasMetaData> aliases,
                          ImmutableOpenMap<String, Custom> customs, ImmutableOpenIntMap<Set<String>> inSyncAllocationIds,
                          ImmutableOpenMap<String, DiffableStringMap> customData, ImmutableOpenIntMap<Set<String>> inSyncAllocationIds,
                          DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters initialRecoveryFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters,
                          Version indexCreatedVersion, Version indexUpgradedVersion,
                          int routingNumShards, int routingPartitionSize, ActiveShardCount waitForActiveShards, ImmutableOpenMap<String, RolloverInfo> rolloverInfos) {

@@ -360,7 +305,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
        this.totalNumberOfShards = numberOfShards * (numberOfReplicas + 1);
        this.settings = settings;
        this.mappings = mappings;
        this.customs = customs;
        this.customData = customData;
        this.aliases = aliases;
        this.inSyncAllocationIds = inSyncAllocationIds;
        this.requireFilters = requireFilters;

@@ -485,22 +430,14 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
        return mappings.get(mappingType);
    }

    // we keep the shrink settings for BWC - this can be removed in 8.0
    // we can't remove in 7 since this setting might be baked into an index coming in via a full cluster restart from 6.0
    public static final String INDEX_SHRINK_SOURCE_UUID_KEY = "index.shrink.source.uuid";
    public static final String INDEX_SHRINK_SOURCE_NAME_KEY = "index.shrink.source.name";
    public static final String INDEX_RESIZE_SOURCE_UUID_KEY = "index.resize.source.uuid";
    public static final String INDEX_RESIZE_SOURCE_NAME_KEY = "index.resize.source.name";
    public static final Setting<String> INDEX_SHRINK_SOURCE_UUID = Setting.simpleString(INDEX_SHRINK_SOURCE_UUID_KEY);
    public static final Setting<String> INDEX_SHRINK_SOURCE_NAME = Setting.simpleString(INDEX_SHRINK_SOURCE_NAME_KEY);
    public static final Setting<String> INDEX_RESIZE_SOURCE_UUID = Setting.simpleString(INDEX_RESIZE_SOURCE_UUID_KEY,
        INDEX_SHRINK_SOURCE_UUID);
    public static final Setting<String> INDEX_RESIZE_SOURCE_NAME = Setting.simpleString(INDEX_RESIZE_SOURCE_NAME_KEY,
        INDEX_SHRINK_SOURCE_NAME);
    public static final Setting<String> INDEX_RESIZE_SOURCE_UUID = Setting.simpleString(INDEX_RESIZE_SOURCE_UUID_KEY);
    public static final Setting<String> INDEX_RESIZE_SOURCE_NAME = Setting.simpleString(INDEX_RESIZE_SOURCE_NAME_KEY);

    public Index getResizeSourceIndex() {
        return INDEX_RESIZE_SOURCE_UUID.exists(settings) || INDEX_SHRINK_SOURCE_UUID.exists(settings)
            ? new Index(INDEX_RESIZE_SOURCE_NAME.get(settings), INDEX_RESIZE_SOURCE_UUID.get(settings)) : null;
        return INDEX_RESIZE_SOURCE_UUID.exists(settings) ? new Index(INDEX_RESIZE_SOURCE_NAME.get(settings),
            INDEX_RESIZE_SOURCE_UUID.get(settings)) : null;
    }

    /**

@@ -519,13 +456,12 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
        return mappings.get(MapperService.DEFAULT_MAPPING);
    }

    public ImmutableOpenMap<String, Custom> getCustoms() {
        return this.customs;
    ImmutableOpenMap<String, DiffableStringMap> getCustomData() {
        return this.customData;
    }

    @SuppressWarnings("unchecked")
    public <T extends Custom> T custom(String type) {
        return (T) customs.get(type);
    public Map<String, String> getCustomData(final String key) {
        return Collections.unmodifiableMap(this.customData.get(key));
    }

    public ImmutableOpenIntMap<Set<String>> getInSyncAllocationIds() {

@@ -591,7 +527,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
        if (state != that.state) {
            return false;
        }
        if (!customs.equals(that.customs)) {
        if (!customData.equals(that.customData)) {
            return false;
        }
        if (routingNumShards != that.routingNumShards) {

@@ -620,7 +556,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
        result = 31 * result + aliases.hashCode();
        result = 31 * result + settings.hashCode();
        result = 31 * result + mappings.hashCode();
        result = 31 * result + customs.hashCode();
        result = 31 * result + customData.hashCode();
        result = 31 * result + Long.hashCode(routingFactor);
        result = 31 * result + Long.hashCode(routingNumShards);
        result = 31 * result + Arrays.hashCode(primaryTerms);

@@ -660,7 +596,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
        private final Settings settings;
        private final Diff<ImmutableOpenMap<String, MappingMetaData>> mappings;
        private final Diff<ImmutableOpenMap<String, AliasMetaData>> aliases;
        private final Diff<ImmutableOpenMap<String, Custom>> customs;
        private final Diff<ImmutableOpenMap<String, DiffableStringMap>> customData;
        private final Diff<ImmutableOpenIntMap<Set<String>>> inSyncAllocationIds;
        private final Diff<ImmutableOpenMap<String, RolloverInfo>> rolloverInfos;

@@ -674,7 +610,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
            primaryTerms = after.primaryTerms;
            mappings = DiffableUtils.diff(before.mappings, after.mappings, DiffableUtils.getStringKeySerializer());
            aliases = DiffableUtils.diff(before.aliases, after.aliases, DiffableUtils.getStringKeySerializer());
            customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer());
            customData = DiffableUtils.diff(before.customData, after.customData, DiffableUtils.getStringKeySerializer());
            inSyncAllocationIds = DiffableUtils.diff(before.inSyncAllocationIds, after.inSyncAllocationIds,
                DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance());
            rolloverInfos = DiffableUtils.diff(before.rolloverInfos, after.rolloverInfos, DiffableUtils.getStringKeySerializer());

@@ -696,18 +632,8 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
                MappingMetaData::readDiffFrom);
            aliases = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), AliasMetaData::new,
                AliasMetaData::readDiffFrom);
            customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(),
                new DiffableUtils.DiffableValueSerializer<String, Custom>() {
                    @Override
                    public Custom read(StreamInput in, String key) throws IOException {
                        return lookupPrototypeSafe(key).readFrom(in);
                    }

                    @Override
                    public Diff<Custom> readDiff(StreamInput in, String key) throws IOException {
                        return lookupPrototypeSafe(key).readDiffFrom(in);
                    }
                });
            customData = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), DiffableStringMap::new,
                DiffableStringMap::readDiffFrom);
            inSyncAllocationIds = DiffableUtils.readImmutableOpenIntMapDiff(in, DiffableUtils.getVIntKeySerializer(),
                DiffableUtils.StringSetValueSerializer.getInstance());
            if (in.getVersion().onOrAfter(Version.V_6_4_0)) {

@@ -732,7 +658,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
            out.writeVLongArray(primaryTerms);
            mappings.writeTo(out);
            aliases.writeTo(out);
            customs.writeTo(out);
            customData.writeTo(out);
            inSyncAllocationIds.writeTo(out);
            if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
                rolloverInfos.writeTo(out);

@@ -750,7 +676,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
            builder.primaryTerms(primaryTerms);
            builder.mappings.putAll(mappings.apply(part.mappings));
            builder.aliases.putAll(aliases.apply(part.aliases));
            builder.customs.putAll(customs.apply(part.customs));
            builder.customMetaData.putAll(customData.apply(part.customData));
            builder.inSyncAllocationIds.putAll(inSyncAllocationIds.apply(part.inSyncAllocationIds));
            builder.rolloverInfos.putAll(rolloverInfos.apply(part.rolloverInfos));
            return builder.build();

@@ -780,10 +706,17 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
            builder.putAlias(aliasMd);
        }
        int customSize = in.readVInt();
        for (int i = 0; i < customSize; i++) {
            String type = in.readString();
            Custom customIndexMetaData = lookupPrototypeSafe(type).readFrom(in);
            builder.putCustom(type, customIndexMetaData);
        if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
            for (int i = 0; i < customSize; i++) {
                String key = in.readString();
                DiffableStringMap custom = new DiffableStringMap(in);
                builder.putCustom(key, custom);
            }
        } else {
            assert customSize == 0 : "expected no custom index metadata";
            if (customSize > 0) {
                throw new IllegalStateException("unexpected custom metadata when none is supported");
            }
        }
        int inSyncAllocationIdsSize = in.readVInt();
        for (int i = 0; i < inSyncAllocationIdsSize; i++) {

@@ -819,10 +752,14 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
        for (ObjectCursor<AliasMetaData> cursor : aliases.values()) {
            cursor.value.writeTo(out);
        }
        out.writeVInt(customs.size());
        for (ObjectObjectCursor<String, Custom> cursor : customs) {
            out.writeString(cursor.key);
            cursor.value.writeTo(out);
        if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
            out.writeVInt(customData.size());
            for (final ObjectObjectCursor<String, DiffableStringMap> cursor : customData) {
                out.writeString(cursor.key);
                cursor.value.writeTo(out);
            }
        } else {
            out.writeVInt(0);
        }
        out.writeVInt(inSyncAllocationIds.size());
        for (IntObjectCursor<Set<String>> cursor : inSyncAllocationIds) {

@@ -855,7 +792,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
        private Settings settings = Settings.Builder.EMPTY_SETTINGS;
        private final ImmutableOpenMap.Builder<String, MappingMetaData> mappings;
        private final ImmutableOpenMap.Builder<String, AliasMetaData> aliases;
        private final ImmutableOpenMap.Builder<String, Custom> customs;
        private final ImmutableOpenMap.Builder<String, DiffableStringMap> customMetaData;
        private final ImmutableOpenIntMap.Builder<Set<String>> inSyncAllocationIds;
        private final ImmutableOpenMap.Builder<String, RolloverInfo> rolloverInfos;
        private Integer routingNumShards;

@@ -864,7 +801,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
            this.index = index;
            this.mappings = ImmutableOpenMap.builder();
            this.aliases = ImmutableOpenMap.builder();
            this.customs = ImmutableOpenMap.builder();
            this.customMetaData = ImmutableOpenMap.builder();
            this.inSyncAllocationIds = ImmutableOpenIntMap.builder();
            this.rolloverInfos = ImmutableOpenMap.builder();
        }

@@ -878,7 +815,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
            this.primaryTerms = indexMetaData.primaryTerms.clone();
            this.mappings = ImmutableOpenMap.builder(indexMetaData.mappings);
            this.aliases = ImmutableOpenMap.builder(indexMetaData.aliases);
            this.customs = ImmutableOpenMap.builder(indexMetaData.customs);
            this.customMetaData = ImmutableOpenMap.builder(indexMetaData.customData);
            this.routingNumShards = indexMetaData.routingNumShards;
            this.inSyncAllocationIds = ImmutableOpenIntMap.builder(indexMetaData.inSyncAllocationIds);
            this.rolloverInfos = ImmutableOpenMap.builder(indexMetaData.rolloverInfos);

@@ -1008,8 +945,8 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
            return this;
        }

        public Builder putCustom(String type, Custom customIndexMetaData) {
            this.customs.put(type, customIndexMetaData);
        public Builder putCustom(String type, Map<String, String> customIndexMetaData) {
            this.customMetaData.put(type, new DiffableStringMap(customIndexMetaData));
            return this;
        }

@@ -1177,7 +1114,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
            final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE);

            return new IndexMetaData(new Index(index, uuid), version, mappingVersion, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(),
                tmpAliases.build(), customs.build(), filledInSyncAllocationIds.build(), requireFilters, initialRecoveryFilters, includeFilters, excludeFilters,
                tmpAliases.build(), customMetaData.build(), filledInSyncAllocationIds.build(), requireFilters, initialRecoveryFilters, includeFilters, excludeFilters,
                indexCreatedVersion, indexUpgradedVersion, getRoutingNumShards(), routingPartitionSize, waitForActiveShards, rolloverInfos.build());
        }

@@ -1205,10 +1142,9 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
            }
            builder.endArray();

            for (ObjectObjectCursor<String, Custom> cursor : indexMetaData.getCustoms()) {
                builder.startObject(cursor.key);
                cursor.value.toXContent(builder, params);
                builder.endObject();
            for (ObjectObjectCursor<String, DiffableStringMap> cursor : indexMetaData.customData) {
                builder.field(cursor.key);
                builder.map(cursor.value);
            }

            builder.startObject(KEY_ALIASES);

@@ -1317,15 +1253,8 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment
                        assert Version.CURRENT.major <= 5;
                        parser.skipChildren();
                    } else {
                        // check if its a custom index metadata
                        Custom proto = lookupPrototype(currentFieldName);
                        if (proto == null) {
                            //TODO warn
                            parser.skipChildren();
                        } else {
                            Custom custom = proto.fromXContent(parser);
                            builder.putCustom(custom.type(), custom);
                        }
                        // assume it's custom index metadata
                        builder.putCustom(currentFieldName, parser.mapStrings());
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if (KEY_MAPPINGS.equals(currentFieldName)) {
@@ -20,7 +20,7 @@ package org.elasticsearch.cluster.metadata;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff;

@@ -87,13 +87,10 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>

    private final ImmutableOpenMap<String, AliasMetaData> aliases;

    private final ImmutableOpenMap<String, IndexMetaData.Custom> customs;

    public IndexTemplateMetaData(String name, int order, Integer version,
                                 List<String> patterns, Settings settings,
                                 ImmutableOpenMap<String, CompressedXContent> mappings,
                                 ImmutableOpenMap<String, AliasMetaData> aliases,
                                 ImmutableOpenMap<String, IndexMetaData.Custom> customs) {
                                 ImmutableOpenMap<String, AliasMetaData> aliases) {
        if (patterns == null || patterns.isEmpty()) {
            throw new IllegalArgumentException("Index patterns must not be null or empty; got " + patterns);
        }

@@ -104,7 +101,6 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>
        this.settings = settings;
        this.mappings = mappings;
        this.aliases = aliases;
        this.customs = customs;
    }

    public String name() {

@@ -165,19 +161,6 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>
        return this.aliases;
    }

    public ImmutableOpenMap<String, IndexMetaData.Custom> customs() {
        return this.customs;
    }

    public ImmutableOpenMap<String, IndexMetaData.Custom> getCustoms() {
        return this.customs;
    }

    @SuppressWarnings("unchecked")
    public <T extends IndexMetaData.Custom> T custom(String type) {
        return (T) customs.get(type);
    }

    public static Builder builder(String name) {
        return new Builder(name);
    }

@@ -227,11 +210,13 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>
                AliasMetaData aliasMd = new AliasMetaData(in);
                builder.putAlias(aliasMd);
            }
            int customSize = in.readVInt();
            for (int i = 0; i < customSize; i++) {
                String type = in.readString();
                IndexMetaData.Custom customIndexMetaData = IndexMetaData.lookupPrototypeSafe(type).readFrom(in);
                builder.putCustom(type, customIndexMetaData);
            if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
                // Previously we allowed custom metadata
                int customSize = in.readVInt();
                assert customSize == 0 : "expected no custom metadata";
                if (customSize > 0) {
                    throw new IllegalStateException("unexpected custom metadata when none is supported");
                }
            }
            builder.version(in.readOptionalVInt());
            return builder.build();

@@ -260,10 +245,8 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>
        for (ObjectCursor<AliasMetaData> cursor : aliases.values()) {
            cursor.value.writeTo(out);
        }
        out.writeVInt(customs.size());
        for (ObjectObjectCursor<String, IndexMetaData.Custom> cursor : customs) {
            out.writeString(cursor.key);
            cursor.value.writeTo(out);
        if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
            out.writeVInt(0);
        }
        out.writeOptionalVInt(version);
    }

@@ -272,9 +255,6 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>

        private static final Set<String> VALID_FIELDS = Sets.newHashSet(
            "template", "order", "mappings", "settings", "index_patterns", "aliases", "version");
        static {
            VALID_FIELDS.addAll(IndexMetaData.customPrototypes.keySet());
        }

        private String name;

@@ -290,13 +270,10 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>

        private final ImmutableOpenMap.Builder<String, AliasMetaData> aliases;

        private final ImmutableOpenMap.Builder<String, IndexMetaData.Custom> customs;

        public Builder(String name) {
            this.name = name;
            mappings = ImmutableOpenMap.builder();
            aliases = ImmutableOpenMap.builder();
            customs = ImmutableOpenMap.builder();
        }

        public Builder(IndexTemplateMetaData indexTemplateMetaData) {

@@ -308,7 +285,6 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>

            mappings = ImmutableOpenMap.builder(indexTemplateMetaData.mappings());
            aliases = ImmutableOpenMap.builder(indexTemplateMetaData.aliases());
            customs = ImmutableOpenMap.builder(indexTemplateMetaData.customs());
        }

        public Builder order(int order) {

@@ -362,23 +338,8 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>
            return this;
        }

        public Builder putCustom(String type, IndexMetaData.Custom customIndexMetaData) {
            this.customs.put(type, customIndexMetaData);
            return this;
        }

        public Builder removeCustom(String type) {
            this.customs.remove(type);
            return this;
        }

        public IndexMetaData.Custom getCustom(String type) {
            return this.customs.get(type);
        }

        public IndexTemplateMetaData build() {
            return new IndexTemplateMetaData(name, order, version, indexPatterns, settings, mappings.build(),
                aliases.build(), customs.build());
            return new IndexTemplateMetaData(name, order, version, indexPatterns, settings, mappings.build(), aliases.build());
        }

        public static void toXContent(IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params)

@@ -425,12 +386,6 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>
                builder.endArray();
            }

            for (ObjectObjectCursor<String, IndexMetaData.Custom> cursor : indexTemplateMetaData.customs()) {
                builder.startObject(cursor.key);
                cursor.value.toXContent(builder, params);
                builder.endObject();
            }

            builder.startObject("aliases");
            for (ObjectCursor<AliasMetaData> cursor : indexTemplateMetaData.aliases().values()) {
                AliasMetaData.Builder.toXContent(cursor.value, builder, params);

@@ -468,15 +423,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaData>
                        builder.putAlias(AliasMetaData.Builder.fromXContent(parser));
                    }
                } else {
                    // check if its a custom index metadata
                    IndexMetaData.Custom proto = IndexMetaData.lookupPrototype(currentFieldName);
                    if (proto == null) {
                        //TODO warn
                        parser.skipChildren();
                    } else {
                        IndexMetaData.Custom custom = proto.fromXContent(parser);
                        builder.putCustom(custom.type(), custom);
                    }
                    throw new ElasticsearchParseException("unknown key [{}] for index template", currentFieldName);
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                if ("mappings".equals(currentFieldName)) {
@@ -38,7 +38,6 @@ import org.elasticsearch.cluster.ack.CreateIndexClusterStateUpdateResponse;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData.Custom;
import org.elasticsearch.cluster.metadata.IndexMetaData.State;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;

@@ -287,7 +286,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                List<IndexTemplateMetaData> templates =
                    MetaDataIndexTemplateService.findTemplates(currentState.metaData(), request.index());

                Map<String, Custom> customs = new HashMap<>();
                Map<String, Map<String, String>> customs = new HashMap<>();

                // add the request mapping
                Map<String, Map<String, Object>> mappings = new HashMap<>();

@@ -300,10 +299,6 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                    mappings.put(entry.getKey(), MapperService.parseMapping(xContentRegistry, entry.getValue()));
                }

                for (Map.Entry<String, Custom> entry : request.customs().entrySet()) {
                    customs.put(entry.getKey(), entry.getValue());
                }

                final Index recoverFromIndex = request.recoverFrom();

                if (recoverFromIndex == null) {

@@ -320,18 +315,6 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                                MapperService.parseMapping(xContentRegistry, mappingString));
                        }
                    }
                    // handle custom
                    for (ObjectObjectCursor<String, Custom> cursor : template.customs()) {
                        String type = cursor.key;
                        IndexMetaData.Custom custom = cursor.value;
                        IndexMetaData.Custom existing = customs.get(type);
                        if (existing == null) {
                            customs.put(type, custom);
                        } else {
                            IndexMetaData.Custom merged = existing.mergeWith(custom);
                            customs.put(type, merged);
                        }
                    }
                    //handle aliases
                    for (ObjectObjectCursor<String, AliasMetaData> cursor : template.aliases()) {
                        AliasMetaData aliasMetaData = cursor.value;

@@ -519,7 +502,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                    indexMetaDataBuilder.putAlias(aliasMetaData);
                }

                for (Map.Entry<String, Custom> customEntry : customs.entrySet()) {
                for (Map.Entry<String, Map<String, String>> customEntry : customs.entrySet()) {
                    indexMetaDataBuilder.putCustom(customEntry.getKey(), customEntry.getValue());
                }

@@ -723,10 +706,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                .put(IndexMetaData.INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING.getKey() + "_id",
                    Strings.arrayToCommaDelimitedString(nodesToAllocateOn.toArray()))
                // we only try once and then give up with a shrink index
                .put("index.allocation.max_retries", 1)
                // we add the legacy way of specifying it here for BWC. We can remove this once it's backported to 6.x
                .put(IndexMetaData.INDEX_SHRINK_SOURCE_NAME.getKey(), resizeSourceIndex.getName())
                .put(IndexMetaData.INDEX_SHRINK_SOURCE_UUID.getKey(), resizeSourceIndex.getUUID());
                .put("index.allocation.max_retries", 1);
        } else if (type == ResizeType.SPLIT) {
            validateSplitIndex(currentState, resizeSourceIndex.getName(), mappingKeys, resizeIntoName, indexSettingsBuilder.build());
        } else {
@@ -179,9 +179,6 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
                        .indexRouting(alias.indexRouting()).searchRouting(alias.searchRouting()).build();
                    templateBuilder.putAlias(aliasMetaData);
                }
                for (Map.Entry<String, IndexMetaData.Custom> entry : request.customs.entrySet()) {
                    templateBuilder.putCustom(entry.getKey(), entry.getValue());
                }
                IndexTemplateMetaData template = templateBuilder.build();

                MetaData.Builder builder = MetaData.builder(currentState.metaData()).put(template);

@@ -339,7 +336,6 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
        Settings settings = Settings.Builder.EMPTY_SETTINGS;
        Map<String, String> mappings = new HashMap<>();
        List<Alias> aliases = new ArrayList<>();
        Map<String, IndexMetaData.Custom> customs = new HashMap<>();

        TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT;

@@ -378,11 +374,6 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
            return this;
        }

        public PutRequest customs(Map<String, IndexMetaData.Custom> customs) {
            this.customs.putAll(customs);
            return this;
        }

        public PutRequest putMapping(String mappingType, String mappingSource) {
            mappings.put(mappingType, mappingSource);
            return this;
@@ -202,8 +202,10 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
            case IndexMetaData.SETTING_VERSION_UPGRADED:
            case IndexMetaData.SETTING_INDEX_PROVIDED_NAME:
            case MergePolicyConfig.INDEX_MERGE_ENABLED:
            case IndexMetaData.INDEX_SHRINK_SOURCE_UUID_KEY:
            case IndexMetaData.INDEX_SHRINK_SOURCE_NAME_KEY:
            // we keep the shrink settings for BWC - this can be removed in 8.0
            // we can't remove in 7 since this setting might be baked into an index coming in via a full cluster restart from 6.0
            case "index.shrink.source.uuid":
            case "index.shrink.source.name":
            case IndexMetaData.INDEX_RESIZE_SOURCE_UUID_KEY:
            case IndexMetaData.INDEX_RESIZE_SOURCE_NAME_KEY:
                return true;
@@ -33,6 +33,7 @@ import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.single.SingleNodeDiscovery;
import org.elasticsearch.discovery.zen.FileBasedUnicastHostsProvider;
import org.elasticsearch.discovery.zen.SettingsBasedHostsProvider;
import org.elasticsearch.discovery.zen.UnicastHostsProvider;
import org.elasticsearch.discovery.zen.ZenDiscovery;

@@ -40,6 +41,7 @@ import org.elasticsearch.plugins.DiscoveryPlugin;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

@@ -69,10 +71,11 @@ public class DiscoveryModule {
    public DiscoveryModule(Settings settings, ThreadPool threadPool, TransportService transportService,
                           NamedWriteableRegistry namedWriteableRegistry, NetworkService networkService, MasterService masterService,
                           ClusterApplier clusterApplier, ClusterSettings clusterSettings, List<DiscoveryPlugin> plugins,
                           AllocationService allocationService) {
                           AllocationService allocationService, Path configFile) {
        final Collection<BiConsumer<DiscoveryNode,ClusterState>> joinValidators = new ArrayList<>();
        final Map<String, Supplier<UnicastHostsProvider>> hostProviders = new HashMap<>();
        hostProviders.put("settings", () -> new SettingsBasedHostsProvider(settings, transportService));
        hostProviders.put("file", () -> new FileBasedUnicastHostsProvider(settings, configFile));
        for (DiscoveryPlugin plugin : plugins) {
            plugin.getZenHostsProviders(transportService, networkService).entrySet().forEach(entry -> {
                if (hostProviders.put(entry.getKey(), entry.getValue()) != null) {
@@ -0,0 +1,92 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.zen;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * An implementation of {@link UnicastHostsProvider} that reads hosts/ports
 * from {@link #UNICAST_HOSTS_FILE}.
 *
 * Each unicast host/port that is part of the discovery process must be listed on
 * a separate line. If the port is left off an entry, a default port of 9300 is
 * assumed. An example unicast hosts file could read:
 *
 * 67.81.244.10
 * 67.81.244.11:9305
 * 67.81.244.15:9400
 */
public class FileBasedUnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider {

    public static final String UNICAST_HOSTS_FILE = "unicast_hosts.txt";

    private final Path unicastHostsFilePath;
    private final Path legacyUnicastHostsFilePath;

    public FileBasedUnicastHostsProvider(Settings settings, Path configFile) {
        super(settings);
        this.unicastHostsFilePath = configFile.resolve(UNICAST_HOSTS_FILE);
        this.legacyUnicastHostsFilePath = configFile.resolve("discovery-file").resolve(UNICAST_HOSTS_FILE);
    }

    private List<String> getHostsList() {
        if (Files.exists(unicastHostsFilePath)) {
            return readFileContents(unicastHostsFilePath);
        }

        if (Files.exists(legacyUnicastHostsFilePath)) {
            deprecationLogger.deprecated("Found dynamic hosts list at [{}] but this path is deprecated. This list should be at [{}] " +
                "instead. Support for the deprecated path will be removed in future.", legacyUnicastHostsFilePath, unicastHostsFilePath);
            return readFileContents(legacyUnicastHostsFilePath);
        }

        logger.warn("expected, but did not find, a dynamic hosts list at [{}]", unicastHostsFilePath);

        return Collections.emptyList();
    }

    private List<String> readFileContents(Path path) {
        try (Stream<String> lines = Files.lines(path)) {
            return lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments
                .collect(Collectors.toList());
        } catch (IOException e) {
            logger.warn(() -> new ParameterizedMessage("failed to read file [{}]", unicastHostsFilePath), e);
            return Collections.emptyList();
        }
    }

    @Override
    public List<TransportAddress> buildDynamicHosts(HostsResolver hostsResolver) {
        final List<TransportAddress> transportAddresses = hostsResolver.resolveHosts(getHostsList(), 1);
        logger.debug("seed addresses: {}", transportAddresses);
        return transportAddresses;
    }
}
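A hypothetical wiring sketch (not part of this commit; the config path and the stub resolver below are illustrative assumptions):

    // The provider reads unicast_hosts.txt from the config directory and hands the
    // entries to a resolver; DiscoveryModule registers it under the "file" key above.
    Path configDir = Paths.get("/etc/elasticsearch"); // assumed config directory
    FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(Settings.EMPTY, configDir);
    List<TransportAddress> seeds = provider.buildDynamicHosts(
        (hosts, limitPortCounts) -> Collections.emptyList()); // stub HostsResolver for illustration
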
@@ -0,0 +1,381 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.ingest;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.elasticsearch.script.IngestConditionalScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;

public class ConditionalProcessor extends AbstractProcessor {

    static final String TYPE = "conditional";

    private final Script condition;

    private final ScriptService scriptService;

    private final Processor processor;

    ConditionalProcessor(String tag, Script script, ScriptService scriptService, Processor processor) {
        super(tag);
        this.condition = script;
        this.scriptService = scriptService;
        this.processor = processor;
    }

    @Override
    public void execute(IngestDocument ingestDocument) throws Exception {
        IngestConditionalScript script =
            scriptService.compile(condition, IngestConditionalScript.CONTEXT).newInstance(condition.getParams());
        if (script.execute(new UnmodifiableIngestData(ingestDocument.getSourceAndMetadata()))) {
            processor.execute(ingestDocument);
        }
    }

    @Override
    public String getType() {
        return TYPE;
    }

    private static Object wrapUnmodifiable(Object raw) {
        // Wraps all mutable types that the JSON parser can create by immutable wrappers.
        // Any inputs not wrapped are assumed to be immutable.
        if (raw instanceof Map) {
            return new UnmodifiableIngestData((Map<String, Object>) raw);
        } else if (raw instanceof List) {
            return new UnmodifiableIngestList((List<Object>) raw);
        } else if (raw instanceof byte[]) {
            return ((byte[]) raw).clone();
        }
        return raw;
    }

    private static UnsupportedOperationException unmodifiableException() {
        return new UnsupportedOperationException("Mutating ingest documents in conditionals is not supported");
    }

    private static final class UnmodifiableIngestData implements Map<String, Object> {

        private final Map<String, Object> data;

        UnmodifiableIngestData(Map<String, Object> data) {
            this.data = data;
        }

        @Override
        public int size() {
            return data.size();
        }

        @Override
        public boolean isEmpty() {
            return data.isEmpty();
        }

        @Override
        public boolean containsKey(final Object key) {
            return data.containsKey(key);
        }

        @Override
        public boolean containsValue(final Object value) {
            return data.containsValue(value);
        }

        @Override
        public Object get(final Object key) {
            return wrapUnmodifiable(data.get(key));
        }

        @Override
        public Object put(final String key, final Object value) {
            throw unmodifiableException();
        }

        @Override
        public Object remove(final Object key) {
            throw unmodifiableException();
        }

        @Override
        public void putAll(final Map<? extends String, ?> m) {
            throw unmodifiableException();
        }

        @Override
        public void clear() {
            throw unmodifiableException();
        }

        @Override
        public Set<String> keySet() {
            return Collections.unmodifiableSet(data.keySet());
        }

        @Override
        public Collection<Object> values() {
            return new UnmodifiableIngestList(new ArrayList<>(data.values()));
        }

        @Override
        public Set<Entry<String, Object>> entrySet() {
            return data.entrySet().stream().map(entry ->
                new Entry<String, Object>() {
                    @Override
                    public String getKey() {
                        return entry.getKey();
                    }

                    @Override
                    public Object getValue() {
                        return wrapUnmodifiable(entry.getValue());
                    }

                    @Override
                    public Object setValue(final Object value) {
                        throw unmodifiableException();
                    }

                    @Override
                    public boolean equals(final Object o) {
                        return entry.equals(o);
                    }

                    @Override
                    public int hashCode() {
                        return entry.hashCode();
                    }
                }).collect(Collectors.toSet());
        }
    }

    private static final class UnmodifiableIngestList implements List<Object> {

        private final List<Object> data;

        UnmodifiableIngestList(List<Object> data) {
            this.data = data;
        }

        @Override
        public int size() {
            return data.size();
        }

        @Override
        public boolean isEmpty() {
            return data.isEmpty();
        }

        @Override
        public boolean contains(final Object o) {
            return data.contains(o);
        }

        @Override
        public Iterator<Object> iterator() {
            Iterator<Object> wrapped = data.iterator();
            return new Iterator<Object>() {
                @Override
                public boolean hasNext() {
                    return wrapped.hasNext();
                }

                @Override
                public Object next() {
                    return wrapped.next();
                }

                @Override
                public void remove() {
                    throw unmodifiableException();
                }
            };
        }

        @Override
        public Object[] toArray() {
            Object[] wrapped = data.toArray(new Object[0]);
            for (int i = 0; i < wrapped.length; i++) {
                wrapped[i] = wrapUnmodifiable(wrapped[i]);
            }
            return wrapped;
        }

        @Override
        public <T> T[] toArray(final T[] a) {
            Object[] raw = data.toArray(new Object[0]);
            T[] wrapped = (T[]) Arrays.copyOf(raw, a.length, a.getClass());
            for (int i = 0; i < wrapped.length; i++) {
                wrapped[i] = (T) wrapUnmodifiable(wrapped[i]);
            }
            return wrapped;
        }

        @Override
        public boolean add(final Object o) {
            throw unmodifiableException();
        }

        @Override
        public boolean remove(final Object o) {
            throw unmodifiableException();
        }

        @Override
        public boolean containsAll(final Collection<?> c) {
            return data.containsAll(c);
        }

        @Override
        public boolean addAll(final Collection<?> c) {
            throw unmodifiableException();
        }

        @Override
        public boolean addAll(final int index, final Collection<?> c) {
            throw unmodifiableException();
        }

        @Override
        public boolean removeAll(final Collection<?> c) {
            throw unmodifiableException();
        }

        @Override
        public boolean retainAll(final Collection<?> c) {
            throw unmodifiableException();
        }

        @Override
        public void clear() {
            throw unmodifiableException();
        }

        @Override
        public Object get(final int index) {
            return wrapUnmodifiable(data.get(index));
        }

        @Override
        public Object set(final int index, final Object element) {
            throw unmodifiableException();
        }

        @Override
        public void add(final int index, final Object element) {
            throw unmodifiableException();
        }

        @Override
        public Object remove(final int index) {
            throw unmodifiableException();
        }

        @Override
        public int indexOf(final Object o) {
            return data.indexOf(o);
        }

        @Override
        public int lastIndexOf(final Object o) {
            return data.lastIndexOf(o);
        }

        @Override
        public ListIterator<Object> listIterator() {
            return new UnmodifiableListIterator(data.listIterator());
        }

        @Override
        public ListIterator<Object> listIterator(final int index) {
            return new UnmodifiableListIterator(data.listIterator(index));
        }

        @Override
        public List<Object> subList(final int fromIndex, final int toIndex) {
            return new UnmodifiableIngestList(data.subList(fromIndex, toIndex));
        }

        private static final class UnmodifiableListIterator implements ListIterator<Object> {

            private final ListIterator<Object> data;

            UnmodifiableListIterator(ListIterator<Object> data) {
                this.data = data;
            }

            @Override
            public boolean hasNext() {
                return data.hasNext();
            }

            @Override
            public Object next() {
                return wrapUnmodifiable(data.next());
            }

            @Override
            public boolean hasPrevious() {
                return data.hasPrevious();
            }

            @Override
            public Object previous() {
                return wrapUnmodifiable(data.previous());
            }

            @Override
            public int nextIndex() {
                return data.nextIndex();
            }

            @Override
            public int previousIndex() {
                return data.previousIndex();
            }

            @Override
            public void remove() {
                throw unmodifiableException();
            }

            @Override
            public void set(final Object o) {
                throw unmodifiableException();
            }

            @Override
            public void add(final Object o) {
                throw unmodifiableException();
            }
        }
    }
}
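A small sketch of the immutability guard (hypothetical, not part of this commit; UnmodifiableIngestData is private, so this only compiles from within the class, e.g. in a test placed alongside it):

    // Reads pass through; any mutation attempted from inside a conditional throws.
    Map<String, Object> source = new HashMap<>();
    source.put("foo", "bar");
    Map<String, Object> readOnly = new UnmodifiableIngestData(source);
    Object value = readOnly.get("foo"); // allowed: returns "bar"
    readOnly.put("foo", "baz");         // throws UnsupportedOperationException
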
@ -19,9 +19,18 @@
|
|||
|
||||
package org.elasticsearch.ingest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.ScriptType;
|
||||
|
@ -296,6 +305,7 @@ public final class ConfigurationUtils {
|
|||
}
|
||||
|
||||
public static List<Processor> readProcessorConfigs(List<Map<String, Object>> processorConfigs,
ScriptService scriptService,
Map<String, Processor.Factory> processorFactories) throws Exception {
Exception exception = null;
List<Processor> processors = new ArrayList<>();

@@ -303,7 +313,7 @@ public final class ConfigurationUtils {
for (Map<String, Object> processorConfigWithKey : processorConfigs) {
for (Map.Entry<String, Object> entry : processorConfigWithKey.entrySet()) {
try {
processors.add(readProcessor(processorFactories, entry.getKey(), entry.getValue()));
processors.add(readProcessor(processorFactories, scriptService, entry.getKey(), entry.getValue()));
} catch (Exception e) {
exception = ExceptionsHelper.useOrSuppress(exception, e);
}

@@ -356,13 +366,14 @@ public final class ConfigurationUtils {

@SuppressWarnings("unchecked")
public static Processor readProcessor(Map<String, Processor.Factory> processorFactories,
ScriptService scriptService,
String type, Object config) throws Exception {
if (config instanceof Map) {
return readProcessor(processorFactories, type, (Map<String, Object>) config);
return readProcessor(processorFactories, scriptService, type, (Map<String, Object>) config);
} else if (config instanceof String && "script".equals(type)) {
Map<String, Object> normalizedScript = new HashMap<>(1);
normalizedScript.put(ScriptType.INLINE.getParseField().getPreferredName(), config);
return readProcessor(processorFactories, type, normalizedScript);
return readProcessor(processorFactories, scriptService, type, normalizedScript);
} else {
throw newConfigurationException(type, null, null,
"property isn't a map, but of type [" + config.getClass().getName() + "]");

@@ -370,15 +381,17 @@ public final class ConfigurationUtils {
}

public static Processor readProcessor(Map<String, Processor.Factory> processorFactories,
ScriptService scriptService,
String type, Map<String, Object> config) throws Exception {
String tag = ConfigurationUtils.readOptionalStringProperty(null, null, config, TAG_KEY);
Script conditionalScript = extractConditional(config);
Processor.Factory factory = processorFactories.get(type);
if (factory != null) {
boolean ignoreFailure = ConfigurationUtils.readBooleanProperty(null, null, config, "ignore_failure", false);
List<Map<String, Object>> onFailureProcessorConfigs =
ConfigurationUtils.readOptionalList(null, null, config, Pipeline.ON_FAILURE_KEY);

List<Processor> onFailureProcessors = readProcessorConfigs(onFailureProcessorConfigs, processorFactories);
List<Processor> onFailureProcessors = readProcessorConfigs(onFailureProcessorConfigs, scriptService, processorFactories);

if (onFailureProcessorConfigs != null && onFailureProcessors.isEmpty()) {
throw newConfigurationException(type, tag, Pipeline.ON_FAILURE_KEY,

@@ -392,14 +405,42 @@ public final class ConfigurationUtils {
type, Arrays.toString(config.keySet().toArray()));
}
if (onFailureProcessors.size() > 0 || ignoreFailure) {
return new CompoundProcessor(ignoreFailure, Collections.singletonList(processor), onFailureProcessors);
} else {
return processor;
processor = new CompoundProcessor(ignoreFailure, Collections.singletonList(processor), onFailureProcessors);
}
if (conditionalScript != null) {
processor = new ConditionalProcessor(tag, conditionalScript, scriptService, processor);
}
return processor;
} catch (Exception e) {
throw newConfigurationException(type, tag, null, e);
}
}
throw newConfigurationException(type, tag, null, "No processor type exists with name [" + type + "]");
}

private static Script extractConditional(Map<String, Object> config) throws IOException {
Object scriptSource = config.remove("if");
if (scriptSource != null) {
try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)
.map(normalizeScript(scriptSource));
InputStream stream = BytesReference.bytes(builder).streamInput();
XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, stream)) {
return Script.parse(parser);
}
}
return null;
}

@SuppressWarnings("unchecked")
private static Map<String, Object> normalizeScript(Object scriptConfig) {
if (scriptConfig instanceof Map<?, ?>) {
return (Map<String, Object>) scriptConfig;
} else if (scriptConfig instanceof String) {
return Collections.singletonMap("source", scriptConfig);
} else {
throw newConfigurationException("conditional", null, "script",
"property isn't a map or string, but of type [" + scriptConfig.getClass().getName() + "]");
}
}
}
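Note: the extractConditional/normalizeScript pair above accepts either a bare string or a full script map under a processor's "if" key; the map form is handed to Script.parse, so anything that parser accepts should work there too. A standalone sketch of the normalization rule, in plain Java with no Elasticsearch types (class and method names here are illustrative only):

import java.util.Collections;
import java.util.Map;

public class ConditionalNormalizationSketch {

    // Mirrors normalizeScript above: a bare string becomes {"source": <script>},
    // a map passes through unchanged, and anything else is rejected.
    @SuppressWarnings("unchecked")
    static Map<String, Object> normalize(Object scriptConfig) {
        if (scriptConfig instanceof Map<?, ?>) {
            return (Map<String, Object>) scriptConfig;
        } else if (scriptConfig instanceof String) {
            return Collections.singletonMap("source", scriptConfig);
        }
        throw new IllegalArgumentException(
            "property isn't a map or string, but of type [" + scriptConfig.getClass().getName() + "]");
    }

    public static void main(String[] args) {
        // "if": "ctx.type == 'error'"  ->  {source=ctx.type == 'error'}
        System.out.println(normalize("ctx.type == 'error'"));
    }
}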
@@ -71,6 +71,7 @@ public class IngestService implements ClusterStateApplier {
public static final String NOOP_PIPELINE_NAME = "_none";

private final ClusterService clusterService;
private final ScriptService scriptService;
private final Map<String, Processor.Factory> processorFactories;
// Ideally this should be in IngestMetadata class, but we don't have the processor factories around there.
// We know of all the processor factories when a node with all its plugins has been initialized. Also some

@@ -85,6 +86,7 @@ public class IngestService implements ClusterStateApplier {
Environment env, ScriptService scriptService, AnalysisRegistry analysisRegistry,
List<IngestPlugin> ingestPlugins) {
this.clusterService = clusterService;
this.scriptService = scriptService;
this.processorFactories = processorFactories(
ingestPlugins,
new Processor.Parameters(

@@ -116,6 +118,10 @@ public class IngestService implements ClusterStateApplier {
return clusterService;
}

public ScriptService getScriptService() {
return scriptService;
}

/**
* Deletes the pipeline specified by id in the request.
*/

@@ -300,11 +306,12 @@ public class IngestService implements ClusterStateApplier {
}

Map<String, Object> pipelineConfig = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2();
Pipeline pipeline = Pipeline.create(request.getId(), pipelineConfig, processorFactories);
Pipeline pipeline = Pipeline.create(request.getId(), pipelineConfig, processorFactories, scriptService);
List<Exception> exceptions = new ArrayList<>();
for (Processor processor : pipeline.flattenAllProcessors()) {
for (Map.Entry<DiscoveryNode, IngestInfo> entry : ingestInfos.entrySet()) {
if (entry.getValue().containsProcessor(processor.getType()) == false) {
String type = processor.getType();
if (entry.getValue().containsProcessor(type) == false && ConditionalProcessor.TYPE.equals(type) == false) {
String message = "Processor type [" + processor.getType() + "] is not installed on node [" + entry.getKey() + "]";
exceptions.add(
ConfigurationUtils.newConfigurationException(processor.getType(), processor.getTag(), null, message)

@@ -452,7 +459,10 @@ public class IngestService implements ClusterStateApplier {
List<ElasticsearchParseException> exceptions = new ArrayList<>();
for (PipelineConfiguration pipeline : ingestMetadata.getPipelines().values()) {
try {
pipelines.put(pipeline.getId(), Pipeline.create(pipeline.getId(), pipeline.getConfigAsMap(), processorFactories));
pipelines.put(
pipeline.getId(),
Pipeline.create(pipeline.getId(), pipeline.getConfigAsMap(), processorFactories, scriptService)
);
} catch (ElasticsearchParseException e) {
pipelines.put(pipeline.getId(), substitutePipeline(pipeline.getId(), e));
exceptions.add(e);
@@ -26,6 +26,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.elasticsearch.script.ScriptService;

/**
* A pipeline is a list of {@link Processor} instances grouped under a unique id.

@@ -52,14 +53,15 @@ public final class Pipeline {
}

public static Pipeline create(String id, Map<String, Object> config,
Map<String, Processor.Factory> processorFactories) throws Exception {
Map<String, Processor.Factory> processorFactories, ScriptService scriptService) throws Exception {
String description = ConfigurationUtils.readOptionalStringProperty(null, null, config, DESCRIPTION_KEY);
Integer version = ConfigurationUtils.readIntProperty(null, null, config, VERSION_KEY, null);
List<Map<String, Object>> processorConfigs = ConfigurationUtils.readList(null, null, config, PROCESSORS_KEY);
List<Processor> processors = ConfigurationUtils.readProcessorConfigs(processorConfigs, processorFactories);
List<Processor> processors = ConfigurationUtils.readProcessorConfigs(processorConfigs, scriptService, processorFactories);
List<Map<String, Object>> onFailureProcessorConfigs =
ConfigurationUtils.readOptionalList(null, null, config, ON_FAILURE_KEY);
List<Processor> onFailureProcessors = ConfigurationUtils.readProcessorConfigs(onFailureProcessorConfigs, processorFactories);
List<Processor> onFailureProcessors =
ConfigurationUtils.readProcessorConfigs(onFailureProcessorConfigs, scriptService, processorFactories);
if (config.isEmpty() == false) {
throw new ElasticsearchParseException("pipeline [" + id +
"] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray()));
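With the extra ScriptService argument threaded through, a caller can hand Pipeline.create a config whose processors carry an "if" entry. A sketch of such a config built as a plain Java map (the set processor and its field values are illustrative; processorFactories and scriptService are assumed to come from the surrounding IngestService):

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class ConditionalPipelineConfigSketch {
    public static void main(String[] args) {
        // One "set" processor guarded by an inline condition; the "if" entry is
        // what extractConditional(...) consumes before the factory sees the config.
        Map<String, Object> setProcessor = new HashMap<>();
        setProcessor.put("if", "ctx.type == 'error'");
        setProcessor.put("field", "error");
        setProcessor.put("value", true);

        Map<String, Object> pipelineConfig = new HashMap<>();
        pipelineConfig.put("description", "flag error documents");
        pipelineConfig.put("processors",
            Arrays.asList(Collections.singletonMap("set", setProcessor)));

        // Pipeline.create("my-pipeline", pipelineConfig, processorFactories, scriptService)
        // would wrap the set processor in a ConditionalProcessor because of the "if" key.
        System.out.println(pipelineConfig);
    }
}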
@@ -471,7 +471,7 @@ public class Node implements Closeable {
final DiscoveryModule discoveryModule = new DiscoveryModule(this.settings, threadPool, transportService, namedWriteableRegistry,
networkService, clusterService.getMasterService(), clusterService.getClusterApplierService(),
clusterService.getClusterSettings(), pluginsService.filterPlugins(DiscoveryPlugin.class),
clusterModule.getAllocationService());
clusterModule.getAllocationService(), environment.configFile());
this.nodeService = new NodeService(settings, threadPool, monitorService, discoveryModule.getDiscovery(),
transportService, indicesService, pluginsService, circuitBreakerService, scriptModule.getScriptService(),
httpServerTransport, ingestService, clusterService, settingsModule.getSettingsFilter(), responseCollectorService,
@@ -87,13 +87,19 @@ public class RestClusterGetSettingsAction extends BaseRestHandler {

private XContentBuilder renderResponse(ClusterState state, boolean renderDefaults, XContentBuilder builder, ToXContent.Params params)
throws IOException {
return
new ClusterGetSettingsResponse(
state.metaData().persistentSettings(),
state.metaData().transientSettings(),
renderDefaults ?
settingsFilter.filter(clusterSettings.diff(state.metaData().settings(), this.settings)) :
Settings.EMPTY
).toXContent(builder, params);
return response(state, renderDefaults, settingsFilter, clusterSettings, settings).toXContent(builder, params);
}

static ClusterGetSettingsResponse response(
final ClusterState state,
final boolean renderDefaults,
final SettingsFilter settingsFilter,
final ClusterSettings clusterSettings,
final Settings settings) {
return new ClusterGetSettingsResponse(
settingsFilter.filter(state.metaData().persistentSettings()),
settingsFilter.filter(state.metaData().transientSettings()),
renderDefaults ? settingsFilter.filter(clusterSettings.diff(state.metaData().settings(), settings)) : Settings.EMPTY);
}

}
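Besides removing duplication, the extracted response(...) helper applies the settings filter to the persistent and transient settings, which the inlined version did not, and it can be exercised without a REST channel. A sketch of the kind of direct assertion this enables (fixture construction elided; the test context is hypothetical, only the calls shown in the diff are used):

// Inside a hypothetical RestClusterGetSettingsAction unit test, given
// fixtures `state`, `settingsFilter` and `clusterSettings`:
ClusterGetSettingsResponse expected = new ClusterGetSettingsResponse(
    settingsFilter.filter(state.metaData().persistentSettings()),
    settingsFilter.filter(state.metaData().transientSettings()),
    Settings.EMPTY); // renderDefaults == false, so no defaults are computed
assertEquals(expected, RestClusterGetSettingsAction.response(
    state, false, settingsFilter, clusterSettings, Settings.EMPTY));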
@@ -0,0 +1,51 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.script;

import java.util.Map;

/**
* A script used by {@link org.elasticsearch.ingest.ConditionalProcessor}.
*/
public abstract class IngestConditionalScript {

public static final String[] PARAMETERS = { "ctx" };

/** The context used to compile {@link IngestConditionalScript} factories. */
public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("processor_conditional", Factory.class);

/** The generic runtime parameters for the script. */
private final Map<String, Object> params;

public IngestConditionalScript(Map<String, Object> params) {
this.params = params;
}

/** Return the parameters for this script. */
public Map<String, Object> getParams() {
return params;
}

public abstract boolean execute(Map<String, Object> ctx);

public interface Factory {
IngestConditionalScript newInstance(Map<String, Object> params);
}
}
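A scripting backend such as Painless would normally generate the concrete subclass; a minimal hand-written sketch of the contract (class name and condition are illustrative):

import java.util.Map;
import org.elasticsearch.script.IngestConditionalScript;

public class FieldEqualsConditionalSketch extends IngestConditionalScript {

    public FieldEqualsConditionalSketch(Map<String, Object> params) {
        super(params);
    }

    @Override
    public boolean execute(Map<String, Object> ctx) {
        // run the guarded processor only when the document's type field is "error"
        return "error".equals(ctx.get("type"));
    }

    // The Factory contract is just a constructor reference away.
    public static final Factory FACTORY = FieldEqualsConditionalSketch::new;
}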
@@ -51,6 +51,7 @@ public class ScriptModule {
BucketAggregationSelectorScript.CONTEXT,
SignificantTermsHeuristicScoreScript.CONTEXT,
IngestScript.CONTEXT,
IngestConditionalScript.CONTEXT,
FilterScript.CONTEXT,
SimilarityScript.CONTEXT,
SimilarityWeightScript.CONTEXT,
@@ -57,6 +57,7 @@ public final class FetchSourceSubPhase implements FetchSubPhase {
if (nestedHit) {
value = getNestedSource((Map<String, Object>) value, hitContext);
}

try {
final int initialCapacity = nestedHit ? 1024 : Math.min(1024, source.internalSourceRef().length());
BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);

@@ -81,6 +82,9 @@ public final class FetchSourceSubPhase implements FetchSubPhase {
private Map<String, Object> getNestedSource(Map<String, Object> sourceAsMap, HitContext hitContext) {
for (SearchHit.NestedIdentity o = hitContext.hit().getNestedIdentity(); o != null; o = o.getChild()) {
sourceAsMap = (Map<String, Object>) sourceAsMap.get(o.getField().string());
if (sourceAsMap == null) {
return null;
}
}
return sourceAsMap;
}
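The added null check makes getNestedSource return null when an intermediate nested object is absent from a (possibly source-filtered) _source, instead of failing with a NullPointerException on the next loop iteration. The traversal rule as a standalone sketch over plain Java maps (names illustrative):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class NestedSourceWalkSketch {

    // Walks source maps along a nested path, returning null rather than throwing
    // when a level is missing -- the case the added check covers.
    @SuppressWarnings("unchecked")
    static Map<String, Object> walk(Map<String, Object> source, String... path) {
        for (String field : path) {
            source = (Map<String, Object>) source.get(field);
            if (source == null) {
                return null; // nested object not present in _source
            }
        }
        return source;
    }

    public static void main(String[] args) {
        Map<String, Object> source = new HashMap<>();
        source.put("user", Collections.singletonMap("name", "kimchy"));
        System.out.println(walk(source, "user"));             // {name=kimchy}
        System.out.println(walk(source, "comments", "user")); // null, no exception
    }
}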
@@ -28,11 +28,15 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.AbstractStreamableXContentTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiLettersOfLength;

public class FieldCapabilitiesResponseTests extends AbstractStreamableXContentTestCase<FieldCapabilitiesResponse> {

@@ -48,22 +52,46 @@ public class FieldCapabilitiesResponseTests extends AbstractStreamableXContentTe

@Override
protected FieldCapabilitiesResponse createTestInstance() {
Map<String, Map<String, FieldCapabilities>> responses = new HashMap<>();
if (randomBoolean()) {
// merged responses
Map<String, Map<String, FieldCapabilities>> responses = new HashMap<>();

String[] fields = generateRandomStringArray(5, 10, false, true);
assertNotNull(fields);

for (String field : fields) {
Map<String, FieldCapabilities> typesToCapabilities = new HashMap<>();
String[] types = generateRandomStringArray(5, 10, false, false);
assertNotNull(types);

for (String type : types) {
typesToCapabilities.put(type, FieldCapabilitiesTests.randomFieldCaps(field));
}
responses.put(field, typesToCapabilities);
}
return new FieldCapabilitiesResponse(responses);
} else {
// non-merged responses
List<FieldCapabilitiesIndexResponse> responses = new ArrayList<>();
int numResponse = randomIntBetween(0, 10);
for (int i = 0; i < numResponse; i++) {
responses.add(createRandomIndexResponse());
}
return new FieldCapabilitiesResponse(responses);
}
}

private FieldCapabilitiesIndexResponse createRandomIndexResponse() {
Map<String, FieldCapabilities> responses = new HashMap<>();

String[] fields = generateRandomStringArray(5, 10, false, true);
assertNotNull(fields);

for (String field : fields) {
Map<String, FieldCapabilities> typesToCapabilities = new HashMap<>();
String[] types = generateRandomStringArray(5, 10, false, false);
assertNotNull(types);

for (String type : types) {
typesToCapabilities.put(type, FieldCapabilitiesTests.randomFieldCaps(field));
}
responses.put(field, typesToCapabilities);
responses.put(field, FieldCapabilitiesTests.randomFieldCaps(field));
}
return new FieldCapabilitiesResponse(responses);
return new FieldCapabilitiesIndexResponse(randomAsciiLettersOfLength(10), responses);
}

@Override

@@ -138,6 +166,11 @@ public class FieldCapabilitiesResponseTests extends AbstractStreamableXContentTe
"}").replaceAll("\\s+", ""), generatedResponse);
}

public void testEmptyResponse() throws IOException {
FieldCapabilitiesResponse testInstance = new FieldCapabilitiesResponse();
assertSerialization(testInstance);
}

private static FieldCapabilitiesResponse createSimpleResponse() {
Map<String, FieldCapabilities> titleCapabilities = new HashMap<>();
titleCapabilities.put("text", new FieldCapabilities("title", "text", true, false));
@@ -0,0 +1,103 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.cluster.metadata;

import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

public class DiffableStringMapTests extends ESTestCase {

public void testDiffableStringMapDiff() {
Map<String, String> m = new HashMap<>();
m.put("foo", "bar");
m.put("baz", "eggplant");
m.put("potato", "canon");
DiffableStringMap dsm = new DiffableStringMap(m);

Map<String, String> m2 = new HashMap<>();
m2.put("foo", "not-bar");
m2.put("newkey", "yay");
m2.put("baz", "eggplant");
DiffableStringMap dsm2 = new DiffableStringMap(m2);

Diff<DiffableStringMap> diff = dsm2.diff(dsm);
assertThat(diff, instanceOf(DiffableStringMap.DiffableStringMapDiff.class));
DiffableStringMap.DiffableStringMapDiff dsmd = (DiffableStringMap.DiffableStringMapDiff) diff;

assertThat(dsmd.getDeletes(), containsInAnyOrder("potato"));
assertThat(dsmd.getDiffs().size(), equalTo(0));
Map<String, String> upserts = new HashMap<>();
upserts.put("foo", "not-bar");
upserts.put("newkey", "yay");
assertThat(dsmd.getUpserts(), equalTo(upserts));

DiffableStringMap dsm3 = diff.apply(dsm);
assertThat(dsm3.get("foo"), equalTo("not-bar"));
assertThat(dsm3.get("newkey"), equalTo("yay"));
assertThat(dsm3.get("baz"), equalTo("eggplant"));
assertThat(dsm3.get("potato"), equalTo(null));
}

public void testRandomDiffing() {
Map<String, String> m = new HashMap<>();
m.put("1", "1");
m.put("2", "2");
m.put("3", "3");
DiffableStringMap dsm = new DiffableStringMap(m);
DiffableStringMap expected = new DiffableStringMap(m);

for (int i = 0; i < randomIntBetween(5, 50); i++) {
if (randomBoolean() && expected.size() > 1) {
expected.remove(randomFrom(expected.keySet()));
} else if (randomBoolean()) {
expected.put(randomFrom(expected.keySet()), randomAlphaOfLength(4));
} else {
expected.put(randomAlphaOfLength(2), randomAlphaOfLength(4));
}
dsm = expected.diff(dsm).apply(dsm);
}
assertThat(expected, equalTo(dsm));
}

public void testSerialization() throws IOException {
Map<String, String> m = new HashMap<>();
// Occasionally have an empty map
if (frequently()) {
m.put("foo", "bar");
m.put("baz", "eggplant");
m.put("potato", "canon");
}
DiffableStringMap dsm = new DiffableStringMap(m);

BytesStreamOutput bso = new BytesStreamOutput();
dsm.writeTo(bso);
DiffableStringMap deserialized = new DiffableStringMap(bso.bytes().streamInput());
assertThat(deserialized, equalTo(dsm));
}
}
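The first test pins down the diff semantics: keys present only in the old map become deletes, keys that are new or changed become upserts, and unchanged keys appear in neither. The same decomposition in plain Java, reusing the test's data (class name illustrative):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class StringMapDiffSketch {
    public static void main(String[] args) {
        Map<String, String> before = new HashMap<>();
        before.put("foo", "bar");
        before.put("baz", "eggplant");
        before.put("potato", "canon");

        Map<String, String> after = new HashMap<>();
        after.put("foo", "not-bar");
        after.put("newkey", "yay");
        after.put("baz", "eggplant");

        // deletes: keys in the old map that the new map dropped
        Set<String> deletes = new HashSet<>(before.keySet());
        deletes.removeAll(after.keySet());

        // upserts: keys whose value is new or changed
        Map<String, String> upserts = new HashMap<>();
        for (Map.Entry<String, String> e : after.entrySet()) {
            if (!e.getValue().equals(before.get(e.getKey()))) {
                upserts.put(e.getKey(), e.getValue());
            }
        }

        System.out.println("deletes=" + deletes); // [potato]
        System.out.println("upserts=" + upserts); // {foo=not-bar, newkey=yay}
    }
}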
@@ -56,11 +56,11 @@ import org.hamcrest.Matchers;
import org.mockito.ArgumentCaptor;

import java.io.IOException;
import java.util.Map;
import java.util.HashSet;
import java.util.Set;
import java.util.Collections;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;

@@ -71,13 +71,13 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.anyMap;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

public class IndexCreationTaskTests extends ESTestCase {

@@ -127,14 +127,12 @@ public class IndexCreationTaskTests extends ESTestCase {
addMatchingTemplate(builder -> builder
.putAlias(AliasMetaData.builder("alias1"))
.putMapping("mapping1", createMapping())
.putCustom("custom1", createCustom())
.settings(Settings.builder().put("key1", "value1"))
);

final ClusterState result = executeTask();

assertThat(result.metaData().index("test").getAliases(), hasKey("alias1"));
assertThat(result.metaData().index("test").getCustoms(), hasKey("custom1"));
assertThat(result.metaData().index("test").getSettings().get("key1"), equalTo("value1"));
assertThat(getMappingsFromResponse(), Matchers.hasKey("mapping1"));
}

@@ -142,41 +140,31 @@ public class IndexCreationTaskTests extends ESTestCase {
public void testApplyDataFromRequest() throws Exception {
setupRequestAlias(new Alias("alias1"));
setupRequestMapping("mapping1", createMapping());
setupRequestCustom("custom1", createCustom());
reqSettings.put("key1", "value1");

final ClusterState result = executeTask();

assertThat(result.metaData().index("test").getAliases(), hasKey("alias1"));
assertThat(result.metaData().index("test").getCustoms(), hasKey("custom1"));
assertThat(result.metaData().index("test").getSettings().get("key1"), equalTo("value1"));
assertThat(getMappingsFromResponse(), Matchers.hasKey("mapping1"));
}

public void testRequestDataHavePriorityOverTemplateData() throws Exception {
final IndexMetaData.Custom tplCustom = createCustom();
final IndexMetaData.Custom reqCustom = createCustom();
final IndexMetaData.Custom mergedCustom = createCustom();
when(reqCustom.mergeWith(tplCustom)).thenReturn(mergedCustom);

final CompressedXContent tplMapping = createMapping("text");
final CompressedXContent reqMapping = createMapping("keyword");

addMatchingTemplate(builder -> builder
.putAlias(AliasMetaData.builder("alias1").searchRouting("fromTpl").build())
.putMapping("mapping1", tplMapping)
.putCustom("custom1", tplCustom)
.settings(Settings.builder().put("key1", "tplValue"))
);

setupRequestAlias(new Alias("alias1").searchRouting("fromReq"));
setupRequestMapping("mapping1", reqMapping);
setupRequestCustom("custom1", reqCustom);
reqSettings.put("key1", "reqValue");

final ClusterState result = executeTask();

assertThat(result.metaData().index("test").getCustoms().get("custom1"), equalTo(mergedCustom));
assertThat(result.metaData().index("test").getAliases().get("alias1").getSearchRouting(), equalTo("fromReq"));
assertThat(result.metaData().index("test").getSettings().get("key1"), equalTo("reqValue"));
assertThat(getMappingsFromResponse().get("mapping1").toString(), equalTo("{type={properties={field={type=keyword}}}}"));

@@ -272,14 +260,13 @@ public class IndexCreationTaskTests extends ESTestCase {
addMatchingTemplate(builder -> builder
.putAlias(AliasMetaData.builder("alias1").searchRouting("fromTpl").build())
.putMapping("mapping1", createMapping())
.putCustom("custom1", createCustom())
.settings(Settings.builder().put("key1", "tplValue"))
);

final ClusterState result = executeTask();

assertThat(result.metaData().index("test").getAliases(), not(hasKey("alias1")));
assertThat(result.metaData().index("test").getCustoms(), not(hasKey("custom1")));
assertThat(result.metaData().index("test").getCustomData(), not(hasKey("custom1")));
assertThat(result.metaData().index("test").getSettings().keySet(), not(Matchers.contains("key1")));
assertThat(getMappingsFromResponse(), not(Matchers.hasKey("mapping1")));
}

@@ -296,7 +283,6 @@ public class IndexCreationTaskTests extends ESTestCase {
Boolean writeIndex = randomBoolean() ? null : randomBoolean();
setupRequestAlias(new Alias("alias1").writeIndex(writeIndex));
setupRequestMapping("mapping1", createMapping());
setupRequestCustom("custom1", createCustom());
reqSettings.put("key1", "value1");

final ClusterState result = executeTask();

@@ -310,7 +296,6 @@ public class IndexCreationTaskTests extends ESTestCase {
.numberOfShards(1).numberOfReplicas(0).build();
idxBuilder.put("test2", existingWriteIndex);
setupRequestMapping("mapping1", createMapping());
setupRequestCustom("custom1", createCustom());
reqSettings.put("key1", "value1");
setupRequestAlias(new Alias("alias1").writeIndex(true));

@@ -342,8 +327,8 @@ public class IndexCreationTaskTests extends ESTestCase {
.numberOfReplicas(numReplicas);
}

private IndexMetaData.Custom createCustom() {
return mock(IndexMetaData.Custom.class);
private Map<String, String> createCustom() {
return Collections.singletonMap("a", "b");
}

private interface MetaDataBuilderConfigurator {

@@ -372,10 +357,6 @@ public class IndexCreationTaskTests extends ESTestCase {
when(request.mappings()).thenReturn(Collections.singletonMap(mappingKey, mapping.string()));
}

private void setupRequestCustom(String customKey, IndexMetaData.Custom custom) throws IOException {
when(request.customs()).thenReturn(Collections.singletonMap(customKey, custom));
}

private CompressedXContent createMapping() throws IOException {
return createMapping("text");
}
@@ -23,7 +23,9 @@ import org.elasticsearch.action.admin.indices.rollover.MaxAgeCondition;
import org.elasticsearch.action.admin.indices.rollover.MaxDocsCondition;
import org.elasticsearch.action.admin.indices.rollover.MaxSizeCondition;
import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;

@@ -45,6 +47,8 @@ import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import static org.hamcrest.Matchers.is;

@@ -71,6 +75,9 @@ public class IndexMetaDataTests extends ESTestCase {
public void testIndexMetaDataSerialization() throws IOException {
Integer numShard = randomFrom(1, 2, 4, 8, 16);
int numberOfReplicas = randomIntBetween(0, 10);
Map<String, String> customMap = new HashMap<>();
customMap.put(randomAlphaOfLength(5), randomAlphaOfLength(10));
customMap.put(randomAlphaOfLength(10), randomAlphaOfLength(15));
IndexMetaData metaData = IndexMetaData.builder("foo")
.settings(Settings.builder()
.put("index.version.created", 1)

@@ -80,6 +87,7 @@ public class IndexMetaDataTests extends ESTestCase {
.creationDate(randomLong())
.primaryTerm(0, 2)
.setRoutingNumShards(32)
.putCustom("my_custom", customMap)
.putRolloverInfo(
new RolloverInfo(randomAlphaOfLength(5),
Arrays.asList(new MaxAgeCondition(TimeValue.timeValueMillis(randomNonNegativeLong())),

@@ -93,7 +101,8 @@ public class IndexMetaDataTests extends ESTestCase {
builder.endObject();
XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
final IndexMetaData fromXContentMeta = IndexMetaData.fromXContent(parser);
assertEquals(metaData, fromXContentMeta);
assertEquals("expected: " + Strings.toString(metaData) + "\nactual : " + Strings.toString(fromXContentMeta),
metaData, fromXContentMeta);
assertEquals(metaData.hashCode(), fromXContentMeta.hashCode());

assertEquals(metaData.getNumberOfReplicas(), fromXContentMeta.getNumberOfReplicas());

@@ -103,6 +112,11 @@ public class IndexMetaDataTests extends ESTestCase {
assertEquals(metaData.getCreationDate(), fromXContentMeta.getCreationDate());
assertEquals(metaData.getRoutingFactor(), fromXContentMeta.getRoutingFactor());
assertEquals(metaData.primaryTerm(0), fromXContentMeta.primaryTerm(0));
ImmutableOpenMap.Builder<String, DiffableStringMap> expectedCustomBuilder = ImmutableOpenMap.builder();
expectedCustomBuilder.put("my_custom", new DiffableStringMap(customMap));
ImmutableOpenMap<String, DiffableStringMap> expectedCustom = expectedCustomBuilder.build();
assertEquals(metaData.getCustomData(), expectedCustom);
assertEquals(metaData.getCustomData(), fromXContentMeta.getCustomData());

final BytesStreamOutput out = new BytesStreamOutput();
metaData.writeTo(out);

@@ -119,6 +133,8 @@ public class IndexMetaDataTests extends ESTestCase {
assertEquals(metaData.getRoutingFactor(), deserialized.getRoutingFactor());
assertEquals(metaData.primaryTerm(0), deserialized.primaryTerm(0));
assertEquals(metaData.getRolloverInfos(), deserialized.getRolloverInfos());
assertEquals(deserialized.getCustomData(), expectedCustom);
assertEquals(metaData.getCustomData(), deserialized.getCustomData());
}
}
@@ -78,13 +78,13 @@ public class IndexTemplateMetaDataTests extends ESTestCase {
public void testValidateInvalidIndexPatterns() throws Exception {
final IllegalArgumentException emptyPatternError = expectThrows(IllegalArgumentException.class, () -> {
new IndexTemplateMetaData(randomRealisticUnicodeOfLengthBetween(5, 10), randomInt(), randomInt(),
Collections.emptyList(), Settings.EMPTY, ImmutableOpenMap.of(), ImmutableOpenMap.of(), ImmutableOpenMap.of());
Collections.emptyList(), Settings.EMPTY, ImmutableOpenMap.of(), ImmutableOpenMap.of());
});
assertThat(emptyPatternError.getMessage(), equalTo("Index patterns must not be null or empty; got []"));

final IllegalArgumentException nullPatternError = expectThrows(IllegalArgumentException.class, () -> {
new IndexTemplateMetaData(randomRealisticUnicodeOfLengthBetween(5, 10), randomInt(), randomInt(),
null, Settings.EMPTY, ImmutableOpenMap.of(), ImmutableOpenMap.of(), ImmutableOpenMap.of());
null, Settings.EMPTY, ImmutableOpenMap.of(), ImmutableOpenMap.of());
});
assertThat(nullPatternError.getMessage(), equalTo("Index patterns must not be null or empty; got null"));
@@ -286,16 +286,19 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase {
metaBuilder.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put("index.uuid", "1234"))
.numberOfShards(4).numberOfReplicas(0));
metaBuilder.put(IndexMetaData.builder("target").settings(settings(Version.CURRENT).put("index.uuid", "5678")
.put("index.shrink.source.name", "test").put("index.shrink.source.uuid", "1234")).numberOfShards(1).numberOfReplicas(0));
.put(IndexMetaData.INDEX_RESIZE_SOURCE_NAME_KEY, "test").put(IndexMetaData.INDEX_RESIZE_SOURCE_UUID_KEY, "1234"))
.numberOfShards(1)
.numberOfReplicas(0));
metaBuilder.put(IndexMetaData.builder("target2").settings(settings(Version.CURRENT).put("index.uuid", "9101112")
.put("index.shrink.source.name", "test").put("index.shrink.source.uuid", "1234")).numberOfShards(2).numberOfReplicas(0));
.put(IndexMetaData.INDEX_RESIZE_SOURCE_NAME_KEY, "test").put(IndexMetaData.INDEX_RESIZE_SOURCE_UUID_KEY, "1234"))
.numberOfShards(2).numberOfReplicas(0));
MetaData metaData = metaBuilder.build();
RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
routingTableBuilder.addAsNew(metaData.index("test"));
routingTableBuilder.addAsNew(metaData.index("target"));
routingTableBuilder.addAsNew(metaData.index("target2"));
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
.metaData(metaData).routingTable(routingTableBuilder.build()).build();
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING
.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTableBuilder.build()).build();

AllocationService allocationService = createAllocationService();
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().add(newNode("node1")))

@@ -330,7 +333,6 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase {
assertEquals(100L, DiskThresholdDecider.getExpectedShardSize(test_1, allocation, 0));
assertEquals(10L, DiskThresholdDecider.getExpectedShardSize(test_0, allocation, 0));

ShardRouting target = ShardRouting.newUnassigned(new ShardId(new Index("target", "5678"), 0),
true, LocalShardsRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
assertEquals(1110L, DiskThresholdDecider.getExpectedShardSize(target, allocation, 0));

@@ -350,12 +352,9 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase {
.build();

allocationService.reroute(clusterState, "foo");

RoutingAllocation allocationWithMissingSourceIndex = new RoutingAllocation(null,
clusterStateWithMissingSourceIndex.getRoutingNodes(), clusterStateWithMissingSourceIndex, info, 0);

assertEquals(42L, DiskThresholdDecider.getExpectedShardSize(target, allocationWithMissingSourceIndex, 42L));
assertEquals(42L, DiskThresholdDecider.getExpectedShardSize(target2, allocationWithMissingSourceIndex, 42L));
}

}
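The test now uses the generalized resize keys (INDEX_RESIZE_SOURCE_NAME_KEY / INDEX_RESIZE_SOURCE_UUID_KEY) instead of the shrink-specific setting strings, since the same source metadata now covers both shrink and split. A fragment showing how a resize target's metadata is built with them, using the same builder calls as the test (settings(Version.CURRENT) is the ESAllocationTestCase helper; all values are illustrative):

IndexMetaData target = IndexMetaData.builder("target")
    .settings(settings(Version.CURRENT)
        .put(IndexMetaData.INDEX_RESIZE_SOURCE_NAME_KEY, "test")
        .put(IndexMetaData.INDEX_RESIZE_SOURCE_UUID_KEY, "1234"))
    .numberOfShards(1)
    .numberOfReplicas(0)
    .build();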
@@ -42,8 +42,8 @@ import org.elasticsearch.test.gateway.TestGatewayAllocator;
import java.util.Arrays;
import java.util.Collections;

import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_SHRINK_SOURCE_NAME;
import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_SHRINK_SOURCE_UUID;
import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_RESIZE_SOURCE_NAME;
import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_RESIZE_SOURCE_UUID;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;

@@ -151,8 +151,8 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase {
.putInSyncAllocationIds(1, Collections.singleton("aid1"))
.build();
metaData.put(sourceIndex, false);
indexSettings.put(INDEX_SHRINK_SOURCE_UUID.getKey(), sourceIndex.getIndexUUID());
indexSettings.put(INDEX_SHRINK_SOURCE_NAME.getKey(), sourceIndex.getIndex().getName());
indexSettings.put(INDEX_RESIZE_SOURCE_UUID.getKey(), sourceIndex.getIndexUUID());
indexSettings.put(INDEX_RESIZE_SOURCE_NAME.getKey(), sourceIndex.getIndex().getName());
} else {
sourceIndex = null;
}
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.discovery;

import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;

@@ -29,6 +28,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.discovery.zen.UnicastHostsProvider;
import org.elasticsearch.discovery.zen.ZenDiscovery;
import org.elasticsearch.plugins.DiscoveryPlugin;

@@ -99,7 +99,7 @@ public class DiscoveryModuleTests extends ESTestCase {

private DiscoveryModule newModule(Settings settings, List<DiscoveryPlugin> plugins) {
return new DiscoveryModule(settings, threadPool, transportService, namedWriteableRegistry, null, masterService,
clusterApplier, clusterSettings, plugins, null);
clusterApplier, clusterSettings, plugins, null, createTempDir().toAbsolutePath());
}

public void testDefaults() {
@@ -17,7 +17,7 @@
* under the License.
*/

package org.elasticsearch.discovery.file;
package org.elasticsearch.discovery.zen;

import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.network.NetworkService;

@@ -26,9 +26,7 @@ import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.discovery.zen.UnicastZenPing;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.MockTransportService;

@@ -50,16 +48,15 @@ import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import static org.elasticsearch.discovery.file.FileBasedUnicastHostsProvider.UNICAST_HOSTS_FILE;
import static org.elasticsearch.discovery.zen.FileBasedUnicastHostsProvider.UNICAST_HOSTS_FILE;

/**
* Tests for {@link FileBasedUnicastHostsProvider}.
*/
public class FileBasedUnicastHostsProviderTests extends ESTestCase {

private boolean legacyLocation;
private ThreadPool threadPool;
private ExecutorService executorService;
private MockTransportService transportService;
private Path configPath;

@Before
public void setUp() throws Exception {

@@ -83,23 +80,20 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase {

@Before
public void createTransportSvc() {
MockTcpTransport transport =
new MockTcpTransport(Settings.EMPTY,
threadPool,
BigArrays.NON_RECYCLING_INSTANCE,
new NoneCircuitBreakerService(),
new NamedWriteableRegistry(Collections.emptyList()),
new NetworkService(Collections.emptyList())) {
@Override
public BoundTransportAddress boundAddress() {
return new BoundTransportAddress(
new TransportAddress[]{new TransportAddress(InetAddress.getLoopbackAddress(), 9300)},
new TransportAddress(InetAddress.getLoopbackAddress(), 9300)
);
}
};
final MockTcpTransport transport = new MockTcpTransport(Settings.EMPTY, threadPool, BigArrays.NON_RECYCLING_INSTANCE,
new NoneCircuitBreakerService(),
new NamedWriteableRegistry(Collections.emptyList()),
new NetworkService(Collections.emptyList())) {
@Override
public BoundTransportAddress boundAddress() {
return new BoundTransportAddress(
new TransportAddress[]{new TransportAddress(InetAddress.getLoopbackAddress(), 9300)},
new TransportAddress(InetAddress.getLoopbackAddress(), 9300)
);
}
};
transportService = new MockTransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
null);
}

public void testBuildDynamicNodes() throws Exception {

@@ -114,18 +108,27 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase {
assertEquals(9300, nodes.get(2).getPort());
}

public void testBuildDynamicNodesLegacyLocation() throws Exception {
legacyLocation = true;
testBuildDynamicNodes();
assertDeprecatedLocationWarning();
}

public void testEmptyUnicastHostsFile() throws Exception {
final List<String> hostEntries = Collections.emptyList();
final List<TransportAddress> addresses = setupAndRunHostProvider(hostEntries);
assertEquals(0, addresses.size());
}

public void testUnicastHostsDoesNotExist() throws Exception {
final Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
final Environment environment = TestEnvironment.newEnvironment(settings);
final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(environment);
public void testEmptyUnicastHostsFileLegacyLocation() throws Exception {
legacyLocation = true;
testEmptyUnicastHostsFile();
assertDeprecatedLocationWarning();
}

public void testUnicastHostsDoesNotExist() {
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(settings, createTempDir().toAbsolutePath());
final List<TransportAddress> addresses = provider.buildDynamicHosts((hosts, limitPortCounts) ->
UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService,
TimeValue.timeValueSeconds(10)));

@@ -133,42 +136,60 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase {
}

public void testInvalidHostEntries() throws Exception {
List<String> hostEntries = Arrays.asList("192.168.0.1:9300:9300");
List<TransportAddress> addresses = setupAndRunHostProvider(hostEntries);
final List<String> hostEntries = Arrays.asList("192.168.0.1:9300:9300");
final List<TransportAddress> addresses = setupAndRunHostProvider(hostEntries);
assertEquals(0, addresses.size());
}

public void testInvalidHostEntriesLegacyLocation() throws Exception {
legacyLocation = true;
testInvalidHostEntries();
assertDeprecatedLocationWarning();
}

public void testSomeInvalidHostEntries() throws Exception {
List<String> hostEntries = Arrays.asList("192.168.0.1:9300:9300", "192.168.0.1:9301");
List<TransportAddress> addresses = setupAndRunHostProvider(hostEntries);
final List<String> hostEntries = Arrays.asList("192.168.0.1:9300:9300", "192.168.0.1:9301");
final List<TransportAddress> addresses = setupAndRunHostProvider(hostEntries);
assertEquals(1, addresses.size()); // only one of the two is valid and will be used
assertEquals("192.168.0.1", addresses.get(0).getAddress());
assertEquals(9301, addresses.get(0).getPort());
}

public void testSomeInvalidHostEntriesLegacyLocation() throws Exception {
legacyLocation = true;
testSomeInvalidHostEntries();
assertDeprecatedLocationWarning();
}

// sets up the config dir, writes to the unicast hosts file in the config dir,
// and then runs the file-based unicast host provider to get the list of discovery nodes
private List<TransportAddress> setupAndRunHostProvider(final List<String> hostEntries) throws IOException {
final Path homeDir = createTempDir();
final Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), homeDir)
.build();
final Path configPath;
.put(Environment.PATH_HOME_SETTING.getKey(), homeDir)
.build();
if (randomBoolean()) {
configPath = homeDir.resolve("config");
} else {
configPath = createTempDir();
}
final Path discoveryFilePath = configPath.resolve("discovery-file");
final Path discoveryFilePath = legacyLocation ? configPath.resolve("discovery-file") : configPath;
Files.createDirectories(discoveryFilePath);
final Path unicastHostsPath = discoveryFilePath.resolve(UNICAST_HOSTS_FILE);
try (BufferedWriter writer = Files.newBufferedWriter(unicastHostsPath)) {
writer.write(String.join("\n", hostEntries));
}

return new FileBasedUnicastHostsProvider(
new Environment(settings, configPath)).buildDynamicHosts((hosts, limitPortCounts) ->
UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService,
TimeValue.timeValueSeconds(10)));
return new FileBasedUnicastHostsProvider(settings, configPath).buildDynamicHosts((hosts, limitPortCounts) ->
UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService,
TimeValue.timeValueSeconds(10)));
}

private void assertDeprecatedLocationWarning() {
assertWarnings("Found dynamic hosts list at [" +
configPath.resolve("discovery-file").resolve(UNICAST_HOSTS_FILE) +
"] but this path is deprecated. This list should be at [" +
configPath.resolve(UNICAST_HOSTS_FILE) +
"] instead. Support for the deprecated path will be removed in future.");
}
}
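The provider moved from the discovery-file plugin package into discovery.zen, and the hosts list now lives directly in the config directory; the old config/discovery-file/ location still works but triggers the deprecation warning asserted above. A sketch of writing the file at the new location (the path is illustrative; the file name is assumed from the UNICAST_HOSTS_FILE constant, i.e. unicast_hosts.txt):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;

public class UnicastHostsFileSketch {
    public static void main(String[] args) throws IOException {
        Path configPath = Paths.get("/tmp/es-config"); // stands in for the node's config dir
        Files.createDirectories(configPath);
        // New location: <config>/unicast_hosts.txt, one host[:port] entry per line.
        Path hostsFile = configPath.resolve("unicast_hosts.txt");
        Files.write(hostsFile, Arrays.asList("10.0.0.1:9300", "10.0.0.2"));
        System.out.println("wrote " + hostsFile);
    }
}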
@@ -0,0 +1,141 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.ingest;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.Is.is;

public class ConditionalProcessorTests extends ESTestCase {

public void testChecksCondition() throws Exception {
String conditionalField = "field1";
String scriptName = "conditionalScript";
String trueValue = "truthy";
ScriptService scriptService = new ScriptService(Settings.builder().build(),
Collections.singletonMap(
Script.DEFAULT_SCRIPT_LANG,
new MockScriptEngine(
Script.DEFAULT_SCRIPT_LANG,
Collections.singletonMap(
scriptName, ctx -> trueValue.equals(ctx.get(conditionalField))
)
)
),
new HashMap<>(ScriptModule.CORE_CONTEXTS)
);
Map<String, Object> document = new HashMap<>();
ConditionalProcessor processor = new ConditionalProcessor(
randomAlphaOfLength(10),
new Script(
ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG,
scriptName, Collections.emptyMap()), scriptService,
new Processor() {
@Override
public void execute(final IngestDocument ingestDocument) throws Exception {
ingestDocument.setFieldValue("foo", "bar");
}

@Override
public String getType() {
return null;
}

@Override
public String getTag() {
return null;
}
});

IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
ingestDocument.setFieldValue(conditionalField, trueValue);
processor.execute(ingestDocument);
assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(trueValue));
assertThat(ingestDocument.getSourceAndMetadata().get("foo"), is("bar"));

String falseValue = "falsy";
ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
ingestDocument.setFieldValue(conditionalField, falseValue);
processor.execute(ingestDocument);
assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(falseValue));
assertThat(ingestDocument.getSourceAndMetadata(), not(hasKey("foo")));
}

@SuppressWarnings("unchecked")
public void testActsOnImmutableData() throws Exception {
assertMutatingCtxThrows(ctx -> ctx.remove("foo"));
assertMutatingCtxThrows(ctx -> ctx.put("foo", "bar"));
assertMutatingCtxThrows(ctx -> ((List<Object>)ctx.get("listField")).add("bar"));
assertMutatingCtxThrows(ctx -> ((List<Object>)ctx.get("listField")).remove("bar"));
}

private static void assertMutatingCtxThrows(Consumer<Map<String, Object>> mutation) throws Exception {
String scriptName = "conditionalScript";
CompletableFuture<Exception> expectedException = new CompletableFuture<>();
ScriptService scriptService = new ScriptService(Settings.builder().build(),
Collections.singletonMap(
Script.DEFAULT_SCRIPT_LANG,
new MockScriptEngine(
Script.DEFAULT_SCRIPT_LANG,
Collections.singletonMap(
scriptName, ctx -> {
try {
mutation.accept(ctx);
} catch (Exception e) {
expectedException.complete(e);
}
return false;
}
)
)
),
new HashMap<>(ScriptModule.CORE_CONTEXTS)
);
Map<String, Object> document = new HashMap<>();
ConditionalProcessor processor = new ConditionalProcessor(
randomAlphaOfLength(10),
new Script(
ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG,
scriptName, Collections.emptyMap()), scriptService, null
);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
ingestDocument.setFieldValue("listField", new ArrayList<>());
processor.execute(ingestDocument);
Exception e = expectedException.get();
assertThat(e, instanceOf(UnsupportedOperationException.class));
assertEquals("Mutating ingest documents in conditionals is not supported", e.getMessage());
}
}
@@ -20,6 +20,7 @@
package org.elasticsearch.ingest;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

@@ -38,6 +39,9 @@ import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.mock;

public class ConfigurationUtilsTests extends ESTestCase {

private final ScriptService scriptService = mock(ScriptService.class);

private Map<String, Object> config;

@Before

@@ -120,7 +124,7 @@ public class ConfigurationUtilsTests extends ESTestCase {
config.add(Collections.singletonMap("test_processor", emptyConfig));
config.add(Collections.singletonMap("test_processor", emptyConfig));

List<Processor> result = ConfigurationUtils.readProcessorConfigs(config, registry);
List<Processor> result = ConfigurationUtils.readProcessorConfigs(config, scriptService, registry);
assertThat(result.size(), equalTo(2));
assertThat(result.get(0), sameInstance(processor));
assertThat(result.get(1), sameInstance(processor));

@@ -129,7 +133,7 @@ public class ConfigurationUtilsTests extends ESTestCase {
unknownTaggedConfig.put("tag", "my_unknown");
config.add(Collections.singletonMap("unknown_processor", unknownTaggedConfig));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
() -> ConfigurationUtils.readProcessorConfigs(config, registry));
() -> ConfigurationUtils.readProcessorConfigs(config, scriptService, registry));
assertThat(e.getMessage(), equalTo("No processor type exists with name [unknown_processor]"));
assertThat(e.getMetadata("es.processor_tag"), equalTo(Collections.singletonList("my_unknown")));
assertThat(e.getMetadata("es.processor_type"), equalTo(Collections.singletonList("unknown_processor")));

@@ -142,7 +146,10 @@ public class ConfigurationUtilsTests extends ESTestCase {
Map<String, Object> secondUnknonwTaggedConfig = new HashMap<>();
secondUnknonwTaggedConfig.put("tag", "my_second_unknown");
config2.add(Collections.singletonMap("second_unknown_processor", secondUnknonwTaggedConfig));
e = expectThrows(ElasticsearchParseException.class, () -> ConfigurationUtils.readProcessorConfigs(config2, registry));
e = expectThrows(
ElasticsearchParseException.class,
() -> ConfigurationUtils.readProcessorConfigs(config2, scriptService, registry)
);
assertThat(e.getMessage(), equalTo("No processor type exists with name [unknown_processor]"));
assertThat(e.getMetadata("es.processor_tag"), equalTo(Collections.singletonList("my_unknown")));
assertThat(e.getMetadata("es.processor_type"), equalTo(Collections.singletonList("unknown_processor")));

@@ -166,17 +173,17 @@ public class ConfigurationUtilsTests extends ESTestCase {
});

Object emptyConfig = Collections.emptyMap();
Processor processor1 = ConfigurationUtils.readProcessor(registry, "script", emptyConfig);
Processor processor1 = ConfigurationUtils.readProcessor(registry, scriptService, "script", emptyConfig);
assertThat(processor1, sameInstance(processor));

Object inlineScript = "test_script";
Processor processor2 = ConfigurationUtils.readProcessor(registry, "script", inlineScript);
Processor processor2 = ConfigurationUtils.readProcessor(registry, scriptService, "script", inlineScript);
assertThat(processor2, sameInstance(processor));

Object invalidConfig = 12L;

ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
() -> ConfigurationUtils.readProcessor(registry, "unknown_processor", invalidConfig));
() -> ConfigurationUtils.readProcessor(registry, scriptService, "unknown_processor", invalidConfig));
assertThat(ex.getMessage(), equalTo("property isn't a map, but of type [" + invalidConfig.getClass().getName() + "]"));
}