Merge remote-tracking branch 'upstream/master' into index-lifecycle

Tal Levy 2018-10-23 09:43:46 -07:00
commit 62ac2fa5ec
166 changed files with 5278 additions and 3086 deletions


@@ -77,39 +77,24 @@ Run a single test case (variants)
 ./gradlew test "-Dtests.class=*.ClassName"
 ----------------------------------------------------------
-Run all tests in a package and sub-packages
+Run all tests in a package and its sub-packages
 ----------------------------------------------------
 ./gradlew test "-Dtests.class=org.elasticsearch.package.*"
 ----------------------------------------------------
-Run any test methods that contain 'esi' (like: ...r*esi*ze...).
+Run any test methods that contain 'esi' (like: ...r*esi*ze...)
 -------------------------------
 ./gradlew test "-Dtests.method=*esi*"
 -------------------------------
-You can also filter tests by certain annotations ie:
-* `@Nightly` - tests that only run in nightly builds (disabled by default)
-* `@Backwards` - backwards compatibility tests (disabled by default)
-* `@AwaitsFix` - tests that are waiting for a bugfix (disabled by default)
-* `@BadApple` - tests that are known to fail randomly (disabled by default)
-Those annotation names can be combined into a filter expression like:
+Run all tests that are waiting for a bugfix (disabled by default)
 ------------------------------------------------
-./gradlew test -Dtests.filter="@nightly and not @backwards"
+./gradlew test -Dtests.filter=@awaitsfix
 ------------------------------------------------
-to run all nightly test but not the ones that are backwards tests. `tests.filter` supports
-the boolean operators `and, or, not` and grouping ie:
----------------------------------------------------------------
-./gradlew test -Dtests.filter="@nightly and not(@badapple or @backwards)"
----------------------------------------------------------------
 === Seed and repetitions.
 Run with a given seed (seed is a hex-encoded long).
@@ -160,8 +145,6 @@ Test groups can be enabled or disabled (true/false).
 Default value provided below in [brackets].
 ------------------------------------------------------------------
-./gradlew test -Dtests.nightly=[false] - nightly test group (@Nightly)
-./gradlew test -Dtests.weekly=[false] - weekly tests (@Weekly)
 ./gradlew test -Dtests.awaitsfix=[false] - known issue (@AwaitsFix)
 ------------------------------------------------------------------


@@ -106,6 +106,7 @@ dependencies {
   compile 'org.apache.rat:apache-rat:0.11'
   compile "org.elasticsearch:jna:4.5.1"
   compile 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
+  compile 'de.thetaphi:forbiddenapis:2.6'
   testCompile "junit:junit:${props.getProperty('junit')}"
 }


@@ -19,7 +19,10 @@
 package org.elasticsearch.gradle.precommit
 import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
+import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
+import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
 import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
+import org.gradle.api.JavaVersion
 import org.gradle.api.Project
 import org.gradle.api.Task
 import org.gradle.api.plugins.JavaBasePlugin
@@ -33,7 +36,7 @@ class PrecommitTasks {
 public static Task create(Project project, boolean includeDependencyLicenses) {
     project.configurations.create("forbiddenApisCliJar")
     project.dependencies {
-        forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.5')
+        forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.6')
     }
     List<Task> precommitTasks = [
@@ -109,47 +112,43 @@ class PrecommitTasks {
 }
 private static Task configureForbiddenApisCli(Project project) {
-    Task forbiddenApisCli = project.tasks.create('forbiddenApis')
-    project.sourceSets.all { sourceSet ->
-        forbiddenApisCli.dependsOn(
-            project.tasks.create(sourceSet.getTaskName('forbiddenApis', null), ForbiddenApisCliTask) {
-                ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')
-                dependsOn(buildResources)
-                it.sourceSet = sourceSet
-                javaHome = project.runtimeJavaHome
-                targetCompatibility = project.compilerJavaVersion
-                bundledSignatures = [
-                    "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out"
-                ]
-                signaturesFiles = project.files(
-                    buildResources.copy("forbidden/jdk-signatures.txt"),
-                    buildResources.copy("forbidden/es-all-signatures.txt")
-                )
-                suppressAnnotations = ['**.SuppressForbidden']
-                if (sourceSet.name == 'test') {
-                    signaturesFiles += project.files(
-                        buildResources.copy("forbidden/es-test-signatures.txt"),
-                        buildResources.copy("forbidden/http-signatures.txt")
-                    )
-                } else {
-                    signaturesFiles += project.files(buildResources.copy("forbidden/es-server-signatures.txt"))
-                }
-                dependsOn sourceSet.classesTaskName
-                classesDirs = sourceSet.output.classesDirs
-                ext.replaceSignatureFiles = { String... names ->
-                    signaturesFiles = project.files(
-                        names.collect { buildResources.copy("forbidden/${it}.txt") }
-                    )
-                }
-                ext.addSignatureFiles = { String... names ->
-                    signaturesFiles += project.files(
-                        names.collect { buildResources.copy("forbidden/${it}.txt") }
-                    )
-                }
-            }
-        )
-    }
-    return forbiddenApisCli
+    project.pluginManager.apply(ForbiddenApisPlugin)
+    ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')
+    project.tasks.withType(CheckForbiddenApis) {
+        dependsOn(buildResources)
+        targetCompatibility = project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 ?
+            project.runtimeJavaVersion.getMajorVersion() :
+            project.runtimeJavaVersion
+        bundledSignatures = [
+            "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out"
+        ]
+        signaturesFiles = project.files(
+            buildResources.copy("forbidden/jdk-signatures.txt"),
+            buildResources.copy("forbidden/es-all-signatures.txt")
+        )
+        suppressAnnotations = ['**.SuppressForbidden']
+        if (name.endsWith('Test')) {
+            signaturesFiles += project.files(
+                buildResources.copy("forbidden/es-test-signatures.txt"),
+                buildResources.copy("forbidden/http-signatures.txt")
+            )
+        } else {
+            signaturesFiles += project.files(buildResources.copy("forbidden/es-server-signatures.txt"))
+        }
+        ext.replaceSignatureFiles = { String... names ->
+            signaturesFiles = project.files(
+                names.collect { buildResources.copy("forbidden/${it}.txt") }
+            )
+        }
+        ext.addSignatureFiles = { String... names ->
+            signaturesFiles += project.files(
+                names.collect { buildResources.copy("forbidden/${it}.txt") }
+            )
+        }
+    }
+    Task forbiddenApis = project.tasks.getByName("forbiddenApis")
+    forbiddenApis.group = ""
+    return forbiddenApis
 }
 private static Task configureCheckstyle(Project project) {
private static Task configureCheckstyle(Project project) { private static Task configureCheckstyle(Project project) {


@@ -1,177 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.gradle.precommit;
-import org.elasticsearch.gradle.LoggedExec;
-import org.gradle.api.JavaVersion;
-import org.gradle.api.artifacts.Configuration;
-import org.gradle.api.file.FileCollection;
-import org.gradle.api.logging.Logger;
-import org.gradle.api.logging.Logging;
-import org.gradle.api.tasks.Input;
-import org.gradle.api.tasks.InputFiles;
-import org.gradle.api.tasks.SkipWhenEmpty;
-import org.gradle.api.tasks.SourceSet;
-import org.gradle.api.tasks.TaskAction;
-import org.gradle.process.JavaExecSpec;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Set;
-public class ForbiddenApisCliTask extends PrecommitTask {
-    private final Logger logger = Logging.getLogger(ForbiddenApisCliTask.class);
-    private FileCollection signaturesFiles;
-    private List<String> signatures = new ArrayList<>();
-    private Set<String> bundledSignatures = new LinkedHashSet<>();
-    private Set<String> suppressAnnotations = new LinkedHashSet<>();
-    private JavaVersion targetCompatibility;
-    private FileCollection classesDirs;
-    private SourceSet sourceSet;
-    // This needs to be an object so it can hold Groovy GStrings
-    private Object javaHome;
-    @Input
-    public JavaVersion getTargetCompatibility() {
-        return targetCompatibility;
-    }
-    public void setTargetCompatibility(JavaVersion targetCompatibility) {
-        if (targetCompatibility.compareTo(JavaVersion.VERSION_1_10) > 0) {
-            logger.warn(
-                "Target compatibility is set to {} but forbiddenapis only supports up to 10. Will cap at 10.",
-                targetCompatibility
-            );
-            this.targetCompatibility = JavaVersion.VERSION_1_10;
-        } else {
-            this.targetCompatibility = targetCompatibility;
-        }
-    }
-    @InputFiles
-    @SkipWhenEmpty
-    public FileCollection getClassesDirs() {
-        return classesDirs.filter(File::exists);
-    }
-    public void setClassesDirs(FileCollection classesDirs) {
-        this.classesDirs = classesDirs;
-    }
-    @InputFiles
-    public FileCollection getSignaturesFiles() {
-        return signaturesFiles;
-    }
-    public void setSignaturesFiles(FileCollection signaturesFiles) {
-        this.signaturesFiles = signaturesFiles;
-    }
-    @Input
-    public List<String> getSignatures() {
-        return signatures;
-    }
-    public void setSignatures(List<String> signatures) {
-        this.signatures = signatures;
-    }
-    @Input
-    public Set<String> getBundledSignatures() {
-        return bundledSignatures;
-    }
-    public void setBundledSignatures(Set<String> bundledSignatures) {
-        this.bundledSignatures = bundledSignatures;
-    }
-    @Input
-    public Set<String> getSuppressAnnotations() {
-        return suppressAnnotations;
-    }
-    public void setSuppressAnnotations(Set<String> suppressAnnotations) {
-        this.suppressAnnotations = suppressAnnotations;
-    }
-    @InputFiles
-    public FileCollection getClassPathFromSourceSet() {
-        return getProject().files(
-            sourceSet.getCompileClasspath(),
-            sourceSet.getRuntimeClasspath()
-        );
-    }
-    public void setSourceSet(SourceSet sourceSet) {
-        this.sourceSet = sourceSet;
-    }
-    @InputFiles
-    public Configuration getForbiddenAPIsConfiguration() {
-        return getProject().getConfigurations().getByName("forbiddenApisCliJar");
-    }
-    @Input
-    public Object getJavaHome() {
-        return javaHome;
-    }
-    public void setJavaHome(Object javaHome) {
-        this.javaHome = javaHome;
-    }
-    @TaskAction
-    public void runForbiddenApisAndWriteMarker() {
-        LoggedExec.javaexec(getProject(), (JavaExecSpec spec) -> {
-            spec.classpath(
-                getForbiddenAPIsConfiguration(),
-                getClassPathFromSourceSet()
-            );
-            spec.setExecutable(getJavaHome() + "/bin/java");
-            spec.setMain("de.thetaphi.forbiddenapis.cli.CliMain");
-            // build the command line
-            getSignaturesFiles().forEach(file -> spec.args("-f", file.getAbsolutePath()));
-            getSuppressAnnotations().forEach(annotation -> spec.args("--suppressannotation", annotation));
-            getBundledSignatures().forEach(bundled -> {
-                    // there's no option for target compatibility so we have to interpret it
-                    final String prefix;
-                    if (bundled.equals("jdk-system-out") ||
-                        bundled.equals("jdk-reflection") ||
-                        bundled.equals("jdk-non-portable")) {
-                        prefix = "";
-                    } else {
-                        prefix = "-" + (
-                            getTargetCompatibility().compareTo(JavaVersion.VERSION_1_9) >= 0 ?
-                                getTargetCompatibility().getMajorVersion() :
-                                "1." + getTargetCompatibility().getMajorVersion())
-                        ;
-                    }
-                    spec.args("-b", bundled + prefix);
-                }
-            );
-            getClassesDirs().forEach(dir ->
-                spec.args("-d", dir)
-            );
-        });
-    }
-}


@@ -52,7 +52,7 @@ import java.util.stream.IntStream;
 public class ThirdPartyAuditTask extends DefaultTask {
     private static final Pattern MISSING_CLASS_PATTERN = Pattern.compile(
-        "WARNING: The referenced class '(.*)' cannot be loaded\\. Please fix the classpath!"
+        "WARNING: Class '(.*)' cannot be loaded \\(.*\\)\\. Please fix the classpath!"
     );
     private static final Pattern VIOLATION_PATTERN = Pattern.compile(

@@ -1,46 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.gradle.testclusters;
-import org.elasticsearch.gradle.Distribution;
-import org.elasticsearch.gradle.Version;
-import java.util.concurrent.Future;
-public interface ElasticsearchConfiguration {
-    String getName();
-    Version getVersion();
-    void setVersion(Version version);
-    default void setVersion(String version) {
-        setVersion(Version.fromString(version));
-    }
-    Distribution getDistribution();
-    void setDistribution(Distribution distribution);
-    void claim();
-    Future<Void> start();
-    void unClaimAndStop();
-}


@@ -29,7 +29,7 @@ import java.util.concurrent.Future;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
-public class ElasticsearchNode implements ElasticsearchConfiguration {
+public class ElasticsearchNode {
     private final String name;
     private final GradleServicesAdapter services;
@@ -45,34 +45,28 @@ public class ElasticsearchNode implements ElasticsearchConfiguration {
         this.services = services;
     }
-    @Override
     public String getName() {
         return name;
     }
-    @Override
     public Version getVersion() {
         return version;
     }
-    @Override
     public void setVersion(Version version) {
         checkNotRunning();
         this.version = version;
     }
-    @Override
     public Distribution getDistribution() {
         return distribution;
     }
-    @Override
     public void setDistribution(Distribution distribution) {
         checkNotRunning();
         this.distribution = distribution;
     }
-    @Override
     public void claim() {
         noOfClaims.incrementAndGet();
     }
@@ -82,7 +76,6 @@ public class ElasticsearchNode implements ElasticsearchConfiguration {
      *
      * @return future of thread running in the background
      */
-    @Override
     public Future<Void> start() {
         if (started.getAndSet(true)) {
             logger.lifecycle("Already started cluster: {}", name);
@@ -95,7 +88,6 @@ public class ElasticsearchNode implements ElasticsearchConfiguration {
     /**
      * Stops a running cluster if it's not claimed. Does nothing otherwise.
      */
-    @Override
     public void unClaimAndStop() {
         int decrementedClaims = noOfClaims.decrementAndGet();
         if (decrementedClaims > 0) {
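
The claim counter above amounts to a reference count around an idempotent start/stop pair. A minimal self-contained sketch of the same idea, with invented names and without the Gradle wiring:

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

class ClaimCountedService {
    private final AtomicInteger noOfClaims = new AtomicInteger();
    private final AtomicBoolean started = new AtomicBoolean();

    // each interested task registers its interest up front
    void claim() {
        noOfClaims.incrementAndGet();
    }

    // idempotent start: only the first caller actually starts anything
    void start() {
        if (started.getAndSet(true) == false) {
            // launch the underlying process here
        }
    }

    // stop only when the last claim is released
    void unClaimAndStop() {
        if (noOfClaims.decrementAndGet() <= 0 && started.getAndSet(false)) {
            // tear the underlying process down here
        }
    }
}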


@@ -46,7 +46,7 @@ public class TestClustersPlugin implements Plugin<Project> {
     @Override
     public void apply(Project project) {
-        NamedDomainObjectContainer<? extends ElasticsearchConfiguration> container = project.container(
+        NamedDomainObjectContainer<? extends ElasticsearchNode> container = project.container(
             ElasticsearchNode.class,
             (name) -> new ElasticsearchNode(name, GradleServicesAdapter.getInstance(project))
         );
@@ -56,12 +56,12 @@ public class TestClustersPlugin implements Plugin<Project> {
         listTask.setGroup("ES cluster formation");
         listTask.setDescription("Lists all ES clusters configured for this project");
         listTask.doLast((Task task) ->
-            container.forEach((ElasticsearchConfiguration cluster) ->
+            container.forEach((ElasticsearchNode cluster) ->
                 logger.lifecycle(" * {}: {}", cluster.getName(), cluster.getDistribution())
             )
         );
-        Map<Task, List<ElasticsearchConfiguration>> taskToCluster = new HashMap<>();
+        Map<Task, List<ElasticsearchNode>> taskToCluster = new HashMap<>();
         // register an extension for all current and future tasks, so that any task can declare that it wants to use a
         // specific cluster.
@@ -70,7 +70,7 @@ public class TestClustersPlugin implements Plugin<Project> {
             .set(
                 "useCluster",
                 new Closure<Void>(this, this) {
-                    public void doCall(ElasticsearchConfiguration conf) {
+                    public void doCall(ElasticsearchNode conf) {
                         taskToCluster.computeIfAbsent(task, k -> new ArrayList<>()).add(conf);
                     }
                 })
@@ -79,7 +79,7 @@ public class TestClustersPlugin implements Plugin<Project> {
         project.getGradle().getTaskGraph().whenReady(taskExecutionGraph ->
             taskExecutionGraph.getAllTasks()
                 .forEach(task ->
-                    taskToCluster.getOrDefault(task, Collections.emptyList()).forEach(ElasticsearchConfiguration::claim)
+                    taskToCluster.getOrDefault(task, Collections.emptyList()).forEach(ElasticsearchNode::claim)
                 )
         );
         project.getGradle().addListener(
@@ -87,7 +87,7 @@ public class TestClustersPlugin implements Plugin<Project> {
             @Override
             public void beforeActions(Task task) {
                 // we only start the cluster before the actions, so we'll not start it if the task is up-to-date
-                taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchConfiguration::start);
+                taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchNode::start);
             }
             @Override
             public void afterActions(Task task) {}
@@ -99,7 +99,7 @@ public class TestClustersPlugin implements Plugin<Project> {
             public void afterExecute(Task task, TaskState state) {
                 // always un-claim the cluster, even if _this_ task is up-to-date, as others might not have been and caused the
                 // cluster to start.
-                taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchConfiguration::unClaimAndStop);
+                taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchNode::unClaimAndStop);
             }
             @Override
             public void beforeExecute(Task task) {}
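
The plugin's bookkeeping boils down to a task-to-clusters multimap: computeIfAbsent registers usage and getOrDefault makes later lookups null-safe. A tiny runnable sketch with placeholder strings standing in for Gradle Task and ElasticsearchNode objects:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TaskClusterMapDemo {
    public static void main(String[] args) {
        Map<String, List<String>> taskToCluster = new HashMap<>();
        // "useCluster": a task declares the cluster it needs
        taskToCluster.computeIfAbsent("integTest", k -> new ArrayList<>()).add("myCluster");
        // later phases iterate without null checks
        taskToCluster.getOrDefault("integTest", Collections.emptyList())
            .forEach(System.out::println); // prints myCluster
    }
}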


@@ -1,4 +1,4 @@
-import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask
+import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
 /*
  * Licensed to Elasticsearch under one or more contributor
@@ -52,7 +52,7 @@ dependencies {
   testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}"
 }
-tasks.withType(ForbiddenApisCliTask) {
+tasks.withType(CheckForbiddenApis) {
   //client does not depend on server, so only jdk and http signatures should be checked
   replaceSignatureFiles ('jdk-signatures', 'http-signatures')
 }


@@ -16,10 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask
+import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
 apply plugin: 'elasticsearch.build'
@@ -32,7 +29,7 @@ dependencies {
 archivesBaseName = 'elasticsearch-launchers'
-tasks.withType(ForbiddenApisCliTask) {
+tasks.withType(CheckForbiddenApis) {
   replaceSignatureFiles 'jdk-signatures'
 }


@@ -55,7 +55,11 @@ POST /kimchy/_forcemerge?only_expunge_deletes=false&max_num_segments=100&flush=t
 === Multi Index
 The force merge API can be applied to more than one index with a single call, or
-even on `_all` the indices.
+even on `_all` the indices. Multi index operations are executed one shard at a
+time per node. Force merge makes the storage for the shard being merged
+temporarily increase, up to double its size in case `max_num_segments` is set
+to `1`, as all segments need to be rewritten into a new one.
 [source,js]
 --------------------------------------------------


@@ -721,12 +721,30 @@ All processors are defined in the following way within a pipeline definition:
 // NOTCONSOLE
 Each processor defines its own configuration parameters, but all processors have
-the ability to declare `tag` and `on_failure` fields. These fields are optional.
+the ability to declare `tag`, `on_failure` and `if` fields. These fields are optional.
 A `tag` is simply a string identifier of the specific instantiation of a certain
 processor in a pipeline. The `tag` field does not affect the processor's behavior,
 but is very useful for bookkeeping and tracing errors to specific processors.
+The `if` field must contain a script that returns a boolean value. If the script evaluates to `true`
+then the processor will be executed for the given document otherwise it will be skipped.
+The `if` field takes an object with the script fields defined in <<script-processor, script-options>>
+and accesses a read only version of the document via the same `ctx` variable used by scripts in the
+<<script-processor>>.
+[source,js]
+--------------------------------------------------
+{
+  "set": {
+    "if": "ctx.bar == 'expectedValue'",
+    "field": "foo",
+    "value": "bar"
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE
 See <<handling-failure-in-pipelines>> to learn more about the `on_failure` field and error handling in pipelines.
 The <<ingest-info,node info API>> can be used to figure out what processors are available in a cluster.
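
Conceptually the new `if` option is just: evaluate a predicate against the document and run the processor only when it returns true. A hedged Java sketch of that control flow (Predicate stands in for the compiled script; all names here are illustrative, not Elasticsearch API):

import java.util.Map;
import java.util.function.Predicate;
import java.util.function.UnaryOperator;

class ConditionalStep {
    // runs the processor when the condition holds, otherwise passes the document through
    static Map<String, Object> apply(Map<String, Object> doc,
                                     Predicate<Map<String, Object>> condition,
                                     UnaryOperator<Map<String, Object>> processor) {
        return condition.test(doc) ? processor.apply(doc) : doc;
    }
}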


@@ -40,7 +40,7 @@ GET /_xpack/migration/assistance
 // CONSOLE
 // TEST[skip:cannot create an old index in docs test]
-A successful call returns a list of indices that need to updated or reindexed:
+A successful call returns a list of indices that need to be updated or reindexed:
 [source,js]
 --------------------------------------------------
@@ -73,7 +73,7 @@ GET /_xpack/migration/assistance/my_*
 // CONSOLE
 // TEST[skip:cannot create an old index in docs test]
-A successful call returns a list of indices that needs to updated or reindexed
+A successful call returns a list of indices that needs to be updated or reindexed
 and match the index specified on the endpoint:
 [source,js]


@@ -8,8 +8,8 @@
 experimental[]
-This API deletes an existing rollup job. The job can be started or stopped, in both cases it will be deleted. Attempting
-to delete a non-existing job will throw an exception
+This API deletes an existing rollup job. A job must be *stopped* first before it can be deleted. Attempting to delete
+a started job will result in an error. Similarly, attempting to delete a nonexistent job will throw an exception.
 .Deleting the job does not delete rolled up data
 **********************************
@@ -99,12 +99,12 @@ A 404 `resource_not_found` exception will be thrown:
 "root_cause" : [
   {
     "type" : "resource_not_found_exception",
-    "reason" : "the task with id does_not_exist doesn't exist",
+    "reason" : "the task with id [does_not_exist] doesn't exist",
     "stack_trace": ...
   }
 ],
 "type" : "resource_not_found_exception",
-"reason" : "the task with id does_not_exist doesn't exist",
+"reason" : "the task with id [does_not_exist] doesn't exist",
 "stack_trace": ...
 },
 "status": 404


@@ -6,7 +6,7 @@ creation in {es}, you must configure
 [source,yaml]
 -----------------------------------------------------------
-action.auto_create_index: .security,.monitoring*,.watches,.triggered_watches,.watcher-history*,.ml*
+action.auto_create_index: .monitoring*,.watches,.triggered_watches,.watcher-history*,.ml*
 -----------------------------------------------------------
 [IMPORTANT]


@@ -3,7 +3,7 @@
 [[sql-operators]]
 === Comparison Operators
-Boolean operator for comparing one or two expressions.
+Boolean operator for comparing against one or multiple expressions.
 * Equality (`=`)
@@ -40,6 +40,13 @@ include-tagged::{sql-specs}/filter.sql-spec[whereBetween]
 include-tagged::{sql-specs}/filter.sql-spec[whereIsNotNullAndIsNull]
 --------------------------------------------------
+* `IN (<value1>, <value2>, ...)`
+["source","sql",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{sql-specs}/filter.sql-spec[whereWithInAndMultipleValues]
+--------------------------------------------------
 [[sql-operators-logical]]
 === Logical Operators


@@ -19,7 +19,7 @@ CAST ( expression<1> AS data_type<2> )
 .Description
-Casts the result of the given expression to the target type.
+Casts the result of the given expression to the target <<sql-data-types, data type>>.
 If the cast is not possible (for example because of target type is too narrow or because
 the value itself cannot be converted), the query fails.
@@ -36,4 +36,33 @@ include-tagged::{sql-specs}/docs.csv-spec[conversionIntToStringCast]
 ["source","sql",subs="attributes,callouts,macros"]
 ----
 include-tagged::{sql-specs}/docs.csv-spec[conversionStringToDateCast]
 ----
+[[sql-functions-type-conversion-convert]]
+==== `CONVERT`
+.Synopsis
+[source, sql]
+----
+CONVERT ( expression<1>, data_type<2> )
+----
+<1> Expression to convert
+<2> Target data type to convert to
+.Description
+Works exactly like <<sql-functions-type-conversion-cast>> with slightly different syntax.
+Moreover, apart from the standard <<sql-data-types, data types>> it supports the corresponding
+https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/explicit-data-type-conversion-function?view=sql-server-2017[ODBC data types].
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[conversionStringToIntConvertODBCDataType]
+----
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[conversionStringToIntConvertESDataType]
+----


@@ -34,6 +34,6 @@ indices:
 ["source","yaml",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{sql-tests}/security/roles.yml[cli_jdbc]
+include-tagged::{sql-tests}/security/roles.yml[cli_drivers]
 --------------------------------------------------


@@ -48,8 +48,7 @@ if (!isEclipse && !isIdea) {
   forbiddenApisJava9 {
     if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) {
-      targetCompatibility = JavaVersion.VERSION_1_9
-      javaHome = project.java9Home
+      targetCompatibility = JavaVersion.VERSION_1_9.getMajorVersion()
     }
     replaceSignatureFiles 'jdk-signatures'
   }


@@ -44,7 +44,7 @@ teardown:
 "processors" : [
   {
     "pipeline" : {
-      "pipeline": "inner"
+      "name": "inner"
     }
   }
 ]
@@ -78,7 +78,7 @@ teardown:
 "processors" : [
   {
     "pipeline" : {
-      "pipeline": "inner"
+      "name": "inner"
     }
   }
 ]
@@ -94,7 +94,7 @@ teardown:
 "processors" : [
   {
     "pipeline" : {
-      "pipeline": "outer"
+      "name": "outer"
     }
   }
 ]


@@ -617,7 +617,7 @@ teardown:
 "processors" : [
   {
     "pipeline" : {
-      "pipeline": "inner"
+      "name": "inner"
     }
   }
 ]
@@ -633,7 +633,7 @@ teardown:
 "processors" : [
   {
     "pipeline" : {
-      "pipeline": "outer"
+      "name": "outer"
     }
   }
 ]
@@ -641,7 +641,6 @@ teardown:
 - match: { acknowledged: true }
 - do:
-    catch: /illegal_state_exception/
     ingest.simulate:
       verbose: true
       body: >
@@ -650,7 +649,7 @@ teardown:
 "processors" : [
   {
     "pipeline" : {
-      "pipeline": "outer"
+      "name": "outer"
     }
   }
 ]
@@ -667,8 +666,10 @@ teardown:
     }
   ]
 }
-- match: { error.root_cause.0.type: "illegal_state_exception" }
-- match: { error.root_cause.0.reason: "Cycle detected for pipeline: inner" }
+- length: { docs: 1 }
+- length: { docs.0.processor_results: 1 }
+- match: { docs.0.processor_results.0.error.reason: "java.lang.IllegalArgumentException: java.lang.IllegalStateException: Cycle detected for pipeline: outer" }
+- match: { docs.0.processor_results.0.error.caused_by.caused_by.reason: "Cycle detected for pipeline: outer" }
 ---
 "Test verbose simulate with Pipeline Processor with Multiple Pipelines":
@@ -686,7 +687,7 @@ teardown:
 },
 {
   "pipeline": {
-    "pipeline": "pipeline2"
+    "name": "pipeline2"
   }
 }
 ]
@@ -724,7 +725,7 @@ teardown:
 },
 {
   "pipeline": {
-    "pipeline": "pipeline1"
+    "name": "pipeline1"
  }
 }
 ]


@@ -1,4 +1,4 @@
-import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask
+import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
 /*
  * Licensed to Elasticsearch under one or more contributor
@@ -25,7 +25,7 @@ esplugin {
   hasClientJar = true
 }
-tasks.withType(ForbiddenApisCliTask) {
+tasks.withType(CheckForbiddenApis) {
   signatures += [
     "com.ibm.icu.text.Collator#getInstance() @ Don't use default locale, use getInstance(ULocale) instead"
   ]


@@ -61,8 +61,7 @@ if (!isEclipse && !isIdea) {
   forbiddenApisJava9 {
     if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) {
-      targetCompatibility = JavaVersion.VERSION_1_9
-      javaHome = project.java9Home
+      targetCompatibility = JavaVersion.VERSION_1_9.getMajorVersion()
    }
  }


@@ -51,8 +51,6 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
  */
 public class ListTasksResponse extends BaseTasksResponse implements ToXContentObject {
     private static final String TASKS = "tasks";
-    private static final String TASK_FAILURES = "task_failures";
-    private static final String NODE_FAILURES = "node_failures";
     private List<TaskInfo> tasks;
@@ -246,28 +244,6 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContentOb
         return builder;
     }
-    private void toXContentCommon(XContentBuilder builder, Params params) throws IOException {
-        if (getTaskFailures() != null && getTaskFailures().size() > 0) {
-            builder.startArray(TASK_FAILURES);
-            for (TaskOperationFailure ex : getTaskFailures()){
-                builder.startObject();
-                builder.value(ex);
-                builder.endObject();
-            }
-            builder.endArray();
-        }
-        if (getNodeFailures() != null && getNodeFailures().size() > 0) {
-            builder.startArray(NODE_FAILURES);
-            for (ElasticsearchException ex : getNodeFailures()) {
-                builder.startObject();
-                ex.toXContent(builder, params);
-                builder.endObject();
-            }
-            builder.endArray();
-        }
-    }
     public static ListTasksResponse fromXContent(XContentParser parser) {
         return PARSER.apply(parser, null);
     }


@@ -21,17 +21,13 @@ package org.elasticsearch.action.ingest;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRunnable;
+import org.elasticsearch.ingest.CompoundProcessor;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.Pipeline;
-import org.elasticsearch.ingest.CompoundProcessor;
-import org.elasticsearch.ingest.PipelineProcessor;
 import org.elasticsearch.threadpool.ThreadPool;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.IdentityHashMap;
 import java.util.List;
-import java.util.Set;
 import static org.elasticsearch.ingest.TrackingResultProcessor.decorate;
@@ -46,11 +42,9 @@ class SimulateExecutionService {
 }
 SimulateDocumentResult executeDocument(Pipeline pipeline, IngestDocument ingestDocument, boolean verbose) {
-    // Prevent cycles in pipeline decoration
-    final Set<PipelineProcessor> pipelinesSeen = Collections.newSetFromMap(new IdentityHashMap<>());
     if (verbose) {
         List<SimulateProcessorResult> processorResultList = new ArrayList<>();
-        CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList, pipelinesSeen);
+        CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList);
         try {
             Pipeline verbosePipeline = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getVersion(),
                 verbosePipelineProcessor);
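
The guard removed here used an identity-based visited set, a common cycle-detection idiom. As a generic sketch (placeholder names, not the commit's code):

import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;

class CycleGuard {
    // identity semantics: two equal-but-distinct nodes are not confused
    private final Set<Object> seen = Collections.newSetFromMap(new IdentityHashMap<>());

    // returns false when the exact same instance is visited twice, i.e. a cycle
    boolean enter(Object node) {
        return seen.add(node);
    }
}

With this change, cycles are no longer rejected while decorating the pipeline for simulation; they surface at execution time as the nested exception asserted in the updated YAML tests earlier in this commit.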


@@ -25,12 +25,15 @@ import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.TaskOperationFailure;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.tasks.TaskId;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
 import java.util.stream.Stream;
 import static java.util.stream.Collectors.toList;
@@ -41,6 +44,9 @@ import static org.elasticsearch.ExceptionsHelper.rethrowAndSuppress;
  * Base class for responses of task-related operations
  */
 public class BaseTasksResponse extends ActionResponse {
+    protected static final String TASK_FAILURES = "task_failures";
+    protected static final String NODE_FAILURES = "node_failures";
     private List<TaskOperationFailure> taskFailures;
     private List<ElasticsearchException> nodeFailures;
@@ -103,4 +109,44 @@ public class BaseTasksResponse extends ActionResponse {
             exp.writeTo(out);
         }
     }
+    protected void toXContentCommon(XContentBuilder builder, ToXContent.Params params) throws IOException {
+        if (getTaskFailures() != null && getTaskFailures().size() > 0) {
+            builder.startArray(TASK_FAILURES);
+            for (TaskOperationFailure ex : getTaskFailures()){
+                builder.startObject();
+                builder.value(ex);
+                builder.endObject();
+            }
+            builder.endArray();
+        }
+        if (getNodeFailures() != null && getNodeFailures().size() > 0) {
+            builder.startArray(NODE_FAILURES);
+            for (ElasticsearchException ex : getNodeFailures()) {
+                builder.startObject();
+                ex.toXContent(builder, params);
+                builder.endObject();
+            }
+            builder.endArray();
+        }
+    }
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        BaseTasksResponse response = (BaseTasksResponse) o;
+        return taskFailures.equals(response.taskFailures)
+            && nodeFailures.equals(response.nodeFailures);
+    }
+    @Override
+    public int hashCode() {
+        return Objects.hash(taskFailures, nodeFailures);
+    }
 }


@@ -27,27 +27,27 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.time.DateMathParser;
-import org.elasticsearch.common.time.DateUtils;
+import org.elasticsearch.common.time.JavaDateMathParser;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.indices.IndexClosedException;
 import org.elasticsearch.indices.InvalidIndexNameException;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.SortedMap;
@@ -62,7 +62,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
     public IndexNameExpressionResolver(Settings settings) {
         super(settings);
         expressionResolvers = Arrays.asList(
-            dateMathExpressionResolver = new DateMathExpressionResolver(settings),
+            dateMathExpressionResolver = new DateMathExpressionResolver(),
             new WildcardExpressionResolver()
         );
     }
@@ -815,6 +815,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
     static final class DateMathExpressionResolver implements ExpressionResolver {
+        private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatters.forPattern("uuuu.MM.dd");
         private static final String EXPRESSION_LEFT_BOUND = "<";
         private static final String EXPRESSION_RIGHT_BOUND = ">";
         private static final char LEFT_BOUND = '{';
@@ -822,17 +823,6 @@ public class IndexNameExpressionResolver extends AbstractComponent {
         private static final char ESCAPE_CHAR = '\\';
         private static final char TIME_ZONE_BOUND = '|';
-        private final DateTimeZone defaultTimeZone;
-        private final String defaultDateFormatterPattern;
-        private final DateTimeFormatter defaultDateFormatter;
-        DateMathExpressionResolver(Settings settings) {
-            String defaultTimeZoneId = settings.get("date_math_expression_resolver.default_time_zone", "UTC");
-            this.defaultTimeZone = DateTimeZone.forID(defaultTimeZoneId);
-            defaultDateFormatterPattern = settings.get("date_math_expression_resolver.default_date_format", "YYYY.MM.dd");
-            this.defaultDateFormatter = DateTimeFormat.forPattern(defaultDateFormatterPattern);
-        }
         @Override
         public List<String> resolve(final Context context, List<String> expressions) {
             List<String> result = new ArrayList<>(expressions.size());
@@ -896,13 +886,12 @@ public class IndexNameExpressionResolver extends AbstractComponent {
             int dateTimeFormatLeftBoundIndex = inPlaceHolderString.indexOf(LEFT_BOUND);
             String mathExpression;
             String dateFormatterPattern;
-            DateTimeFormatter dateFormatter;
-            final DateTimeZone timeZone;
+            DateFormatter dateFormatter;
+            final ZoneId timeZone;
             if (dateTimeFormatLeftBoundIndex < 0) {
                 mathExpression = inPlaceHolderString;
-                dateFormatterPattern = defaultDateFormatterPattern;
-                dateFormatter = defaultDateFormatter;
-                timeZone = defaultTimeZone;
+                dateFormatter = DEFAULT_DATE_FORMATTER;
+                timeZone = ZoneOffset.UTC;
             } else {
                 if (inPlaceHolderString.lastIndexOf(RIGHT_BOUND) != inPlaceHolderString.length() - 1) {
                     throw new ElasticsearchParseException("invalid dynamic name expression [{}]. missing closing `}` for date math format", inPlaceHolderString);
@@ -915,20 +904,18 @@ public class IndexNameExpressionResolver extends AbstractComponent {
                 int formatPatternTimeZoneSeparatorIndex = dateFormatterPatternAndTimeZoneId.indexOf(TIME_ZONE_BOUND);
                 if (formatPatternTimeZoneSeparatorIndex != -1) {
                     dateFormatterPattern = dateFormatterPatternAndTimeZoneId.substring(0, formatPatternTimeZoneSeparatorIndex);
-                    timeZone = DateTimeZone.forID(dateFormatterPatternAndTimeZoneId.substring(formatPatternTimeZoneSeparatorIndex + 1));
+                    timeZone = ZoneId.of(dateFormatterPatternAndTimeZoneId.substring(formatPatternTimeZoneSeparatorIndex + 1));
                 } else {
                     dateFormatterPattern = dateFormatterPatternAndTimeZoneId;
-                    timeZone = defaultTimeZone;
+                    timeZone = ZoneOffset.UTC;
                 }
-                dateFormatter = DateTimeFormat.forPattern(dateFormatterPattern);
+                dateFormatter = DateFormatters.forPattern(dateFormatterPattern);
             }
-            DateTimeFormatter parser = dateFormatter.withZone(timeZone);
-            FormatDateTimeFormatter formatter = new FormatDateTimeFormatter(dateFormatterPattern, parser, Locale.ROOT);
-            DateMathParser dateMathParser = formatter.toDateMathParser();
-            long millis = dateMathParser.parse(mathExpression, context::getStartTime, false,
-                DateUtils.dateTimeZoneToZoneId(timeZone));
-            String time = formatter.printer().print(millis);
+            DateFormatter formatter = dateFormatter.withZone(timeZone);
+            DateMathParser dateMathParser = new JavaDateMathParser(formatter);
+            long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone);
+            String time = formatter.format(Instant.ofEpochMilli(millis));
             beforePlaceHolderSb.append(time);
             inPlaceHolderSb = new StringBuilder();
             inPlaceHolder = false;
@@ -968,18 +955,4 @@ public class IndexNameExpressionResolver extends AbstractComponent {
             return beforePlaceHolderSb.toString();
         }
     }
-    /**
-     * Returns <code>true</code> iff the given expression resolves to the given index name otherwise <code>false</code>
-     */
-    public final boolean matchesIndex(String indexName, String expression, ClusterState state) {
-        final String[] concreteIndices = concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), expression);
-        for (String index : concreteIndices) {
-            if (Regex.simpleMatch(index, indexName)) {
-                return true;
-            }
-        }
-        return indexName.equals(expression);
-    }
 }
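
The migration above swaps Joda time for java.time. A standalone sketch of the equivalent java.time calls (plain DateTimeFormatter is used because DateFormatters is an Elasticsearch-internal wrapper; the pattern and epoch value are only examples):

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class DateMathFormatDemo {
    public static void main(String[] args) {
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("uuuu.MM.dd")
            .withZone(ZoneOffset.UTC);
        // format an epoch-millis value the way a date-math index name is dated
        String time = formatter.format(Instant.ofEpochMilli(0L));
        System.out.println(time); // prints 1970.01.01
    }
}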


@@ -249,53 +249,45 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
     }
     /**
-     * Finds the specific index aliases that point to the specified concrete indices or match partially with the indices via wildcards.
+     * Finds the specific index aliases that point to the requested concrete indices directly
+     * or that match with the indices via wildcards.
      *
-     * @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
-     * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are
-     * present for that index
+     * @param concreteIndices The concrete indices that the aliases must point to in order to be returned.
+     * @return A map of index name to the list of aliases metadata. If a concrete index does not have matching
+     * aliases then the result will <b>not</b> include the index's key.
      */
-    public ImmutableOpenMap<String, List<AliasMetaData>> findAllAliases(String[] concreteIndices) {
-        return findAliases(Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, concreteIndices);
+    public ImmutableOpenMap<String, List<AliasMetaData>> findAllAliases(final String[] concreteIndices) {
+        return findAliases(Strings.EMPTY_ARRAY, concreteIndices);
     }
     /**
-     * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
-     * that point to the specified concrete indices or match partially with the indices via wildcards.
+     * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards, and
+     * that point to the specified concrete indices (directly or matching indices via wildcards).
      *
      * @param aliasesRequest The request to find aliases for
-     * @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
-     * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are
-     * present for that index
+     * @param concreteIndices The concrete indices that the aliases must point to in order to be returned.
+     * @return A map of index name to the list of aliases metadata. If a concrete index does not have matching
+     * aliases then the result will <b>not</b> include the index's key.
      */
-    public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final AliasesRequest aliasesRequest, String[] concreteIndices) {
-        return findAliases(aliasesRequest.getOriginalAliases(), aliasesRequest.aliases(), concreteIndices);
+    public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final AliasesRequest aliasesRequest, final String[] concreteIndices) {
+        return findAliases(aliasesRequest.aliases(), concreteIndices);
     }
     /**
-     * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
-     * that point to the specified concrete indices or match partially with the indices via wildcards.
+     * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards, and
+     * that point to the specified concrete indices (directly or matching indices via wildcards).
      *
-     * @param aliases The aliases to look for
-     * @param originalAliases The original aliases that the user originally requested
-     * @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
-     * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are
-     * present for that index
+     * @param aliases The aliases to look for. Might contain include or exclude wildcards.
+     * @param concreteIndices The concrete indices that the aliases must point to in order to be returned
+     * @return A map of index name to the list of aliases metadata. If a concrete index does not have matching
+     * aliases then the result will <b>not</b> include the index's key.
      */
-    private ImmutableOpenMap<String, List<AliasMetaData>> findAliases(String[] originalAliases, String[] aliases,
-                                                                      String[] concreteIndices) {
+    private ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final String[] aliases, final String[] concreteIndices) {
         assert aliases != null;
-        assert originalAliases != null;
         assert concreteIndices != null;
         if (concreteIndices.length == 0) {
             return ImmutableOpenMap.of();
         }
-        //if aliases were provided but they got replaced with empty aliases, return empty map
-        if (originalAliases.length > 0 && aliases.length == 0) {
-            return ImmutableOpenMap.of();
-        }
         String[] patterns = new String[aliases.length];
         boolean[] include = new boolean[aliases.length];
         for (int i = 0; i < aliases.length; i++) {
@@ -331,7 +323,6 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
                 filteredValues.add(value);
             }
         }
-
         if (filteredValues.isEmpty() == false) {
             // Make the list order deterministic
             CollectionUtil.timSort(filteredValues, Comparator.comparing(AliasMetaData::alias));


@@ -20,12 +20,15 @@
 package org.elasticsearch.ingest;
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.collect.Tuple;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.function.LongSupplier;
 import java.util.stream.Collectors;
 /**
@@ -40,16 +43,33 @@ public class CompoundProcessor implements Processor {
     private final boolean ignoreFailure;
     private final List<Processor> processors;
     private final List<Processor> onFailureProcessors;
+    private final List<Tuple<Processor, IngestMetric>> processorsWithMetrics;
+    private final LongSupplier relativeTimeProvider;
+    CompoundProcessor(LongSupplier relativeTimeProvider, Processor... processor) {
+        this(false, Arrays.asList(processor), Collections.emptyList(), relativeTimeProvider);
+    }
     public CompoundProcessor(Processor... processor) {
         this(false, Arrays.asList(processor), Collections.emptyList());
     }
     public CompoundProcessor(boolean ignoreFailure, List<Processor> processors, List<Processor> onFailureProcessors) {
+        this(ignoreFailure, processors, onFailureProcessors, System::nanoTime);
+    }
+    CompoundProcessor(boolean ignoreFailure, List<Processor> processors, List<Processor> onFailureProcessors,
+                      LongSupplier relativeTimeProvider) {
         super();
         this.ignoreFailure = ignoreFailure;
         this.processors = processors;
         this.onFailureProcessors = onFailureProcessors;
+        this.relativeTimeProvider = relativeTimeProvider;
+        this.processorsWithMetrics = new ArrayList<>(processors.size());
+        processors.forEach(p -> processorsWithMetrics.add(new Tuple<>(p, new IngestMetric())));
+    }
+    List<Tuple<Processor, IngestMetric>> getProcessorsWithMetrics() {
+        return processorsWithMetrics;
     }
     public boolean isIgnoreFailure() {
@@ -94,12 +114,17 @@
     @Override
     public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
-        for (Processor processor : processors) {
+        for (Tuple<Processor, IngestMetric> processorWithMetric : processorsWithMetrics) {
+            Processor processor = processorWithMetric.v1();
+            IngestMetric metric = processorWithMetric.v2();
+            long startTimeInNanos = relativeTimeProvider.getAsLong();
             try {
+                metric.preIngest();
                 if (processor.execute(ingestDocument) == null) {
                     return null;
                 }
             } catch (Exception e) {
+                metric.ingestFailed();
                 if (ignoreFailure) {
                     continue;
                 }
@@ -112,11 +137,15 @@
                 executeOnFailure(ingestDocument, compoundProcessorException);
                 break;
                 }
+            } finally {
+                long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos);
+                metric.postIngest(ingestTimeInMillis);
             }
         }
         return ingestDocument;
     }
     void executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception {
         try {
             putFailureMetadata(ingestDocument, exception);

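The same pre/post/failed accounting recurs in ConditionalProcessor and Pipeline below. A minimal standalone sketch of the pattern, with `processor` and `ingestDocument` as assumed stand-ins (names otherwise taken from the diff):

--------------------------------------------------------------
// Sketch of the metric/timing pattern introduced above; not the exact CompoundProcessor code.
LongSupplier relativeTimeProvider = System::nanoTime; // injectable so tests can fake elapsed time
IngestMetric metric = new IngestMetric();
long startTimeInNanos = relativeTimeProvider.getAsLong();
try {
    metric.preIngest();                    // bumps the current/total counters
    processor.execute(ingestDocument);     // the wrapped work; may throw
} catch (Exception e) {
    metric.ingestFailed();                 // failure counter; time still recorded below
    throw e;
} finally {
    // nanos are converted once at the end; IngestMetric stores millis
    metric.postIngest(TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos));
}
--------------------------------------------------------------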
View File

@@ -28,6 +28,8 @@ import java.util.List;
 import java.util.ListIterator;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.function.LongSupplier;
 import java.util.stream.Collectors;
 
 import org.elasticsearch.script.IngestConditionalScript;
 import org.elasticsearch.script.Script;
@@ -42,24 +44,54 @@ public class ConditionalProcessor extends AbstractProcessor {
 
     private final ScriptService scriptService;
     private final Processor processor;
+    private final IngestMetric metric;
+    private final LongSupplier relativeTimeProvider;
 
     ConditionalProcessor(String tag, Script script, ScriptService scriptService, Processor processor) {
+        this(tag, script, scriptService, processor, System::nanoTime);
+    }
+
+    ConditionalProcessor(String tag, Script script, ScriptService scriptService, Processor processor, LongSupplier relativeTimeProvider) {
         super(tag);
         this.condition = script;
         this.scriptService = scriptService;
         this.processor = processor;
+        this.metric = new IngestMetric();
+        this.relativeTimeProvider = relativeTimeProvider;
     }
 
     @Override
     public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
-        IngestConditionalScript script =
-            scriptService.compile(condition, IngestConditionalScript.CONTEXT).newInstance(condition.getParams());
-        if (script.execute(new UnmodifiableIngestData(ingestDocument.getSourceAndMetadata()))) {
-            return processor.execute(ingestDocument);
+        if (evaluate(ingestDocument)) {
+            long startTimeInNanos = relativeTimeProvider.getAsLong();
+            try {
+                metric.preIngest();
+                return processor.execute(ingestDocument);
+            } catch (Exception e) {
+                metric.ingestFailed();
+                throw e;
+            } finally {
+                long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos);
+                metric.postIngest(ingestTimeInMillis);
+            }
         }
         return ingestDocument;
     }
 
+    boolean evaluate(IngestDocument ingestDocument) {
+        IngestConditionalScript script =
+            scriptService.compile(condition, IngestConditionalScript.CONTEXT).newInstance(condition.getParams());
+        return script.execute(new UnmodifiableIngestData(ingestDocument.getSourceAndMetadata()));
+    }
+
+    Processor getProcessor() {
+        return processor;
+    }
+
+    IngestMetric getMetric() {
+        return metric;
+    }
+
     @Override
     public String getType() {
         return TYPE;

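Because the time source is now an injectable `LongSupplier`, a test can pin the recorded latency exactly. A hypothetical sketch, mirroring the mocking style used in CompoundProcessorTests at the end of this diff (`script`, `scriptService`, `wrappedProcessor`, and `ingestDocument` are assumed test fixtures):

--------------------------------------------------------------
// Hypothetical test sketch: two stubbed clock readings make the measured time exactly 2ms.
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(2));
ConditionalProcessor conditional =
    new ConditionalProcessor("tag", script, scriptService, wrappedProcessor, relativeTimeProvider);
conditional.execute(ingestDocument); // assumes the condition evaluates to true
assertEquals(2, conditional.getMetric().createStats().getIngestTimeInMillis());
--------------------------------------------------------------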
View File

@@ -19,19 +19,6 @@
 
 package org.elasticsearch.ingest;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.function.BiConsumer;
-import java.util.function.Consumer;
-import java.util.stream.Collectors;
-
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.ResourceNotFoundException;
@@ -49,6 +36,7 @@ import org.elasticsearch.cluster.ClusterStateApplier;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
@@ -61,6 +49,19 @@ import org.elasticsearch.plugins.IngestPlugin;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.threadpool.ThreadPool;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
+
 /**
  * Holder class for several ingest related services.
  */
@@ -262,11 +263,59 @@ public class IngestService implements ClusterStateApplier {
                 Pipeline originalPipeline = originalPipelines.get(id);
                 if (originalPipeline != null) {
                     pipeline.getMetrics().add(originalPipeline.getMetrics());
+                    List<Tuple<Processor, IngestMetric>> oldPerProcessMetrics = new ArrayList<>();
+                    List<Tuple<Processor, IngestMetric>> newPerProcessMetrics = new ArrayList<>();
+                    getProcessorMetrics(originalPipeline.getCompoundProcessor(), oldPerProcessMetrics);
+                    getProcessorMetrics(pipeline.getCompoundProcessor(), newPerProcessMetrics);
+                    //Best attempt to populate new processor metrics using a parallel array of the old metrics. This is not ideal since
+                    //the per processor metrics may get reset when the arrays don't match. However, to get to an ideal model, unique and
+                    //consistent id's per processor and/or semantic equals for each processor will be needed.
+                    if (newPerProcessMetrics.size() == oldPerProcessMetrics.size()) {
+                        Iterator<Tuple<Processor, IngestMetric>> oldMetricsIterator = oldPerProcessMetrics.iterator();
+                        for (Tuple<Processor, IngestMetric> compositeMetric : newPerProcessMetrics) {
+                            String type = compositeMetric.v1().getType();
+                            IngestMetric metric = compositeMetric.v2();
+                            if (oldMetricsIterator.hasNext()) {
+                                Tuple<Processor, IngestMetric> oldCompositeMetric = oldMetricsIterator.next();
+                                String oldType = oldCompositeMetric.v1().getType();
+                                IngestMetric oldMetric = oldCompositeMetric.v2();
+                                if (type.equals(oldType)) {
+                                    metric.add(oldMetric);
+                                }
+                            }
+                        }
+                    }
                 }
             });
         }
     }
 
+    /**
+     * Recursive method to obtain all of the non-failure processors for given compoundProcessor. Since conditionals are implemented as
+     * wrappers to the actual processor, always prefer the actual processor's metric over the conditional processor's metric.
+     * @param compoundProcessor The compound processor to start walking the non-failure processors
+     * @param processorMetrics The list of {@link Processor} {@link IngestMetric} tuples.
+     * @return the processorMetrics for all non-failure processor that belong to the original compoundProcessor
+     */
+    private static List<Tuple<Processor, IngestMetric>> getProcessorMetrics(CompoundProcessor compoundProcessor,
+                                                                            List<Tuple<Processor, IngestMetric>> processorMetrics) {
+        //only surface the top level non-failure processors, on-failure processor times will be included in the top level non-failure
+        for (Tuple<Processor, IngestMetric> processorWithMetric : compoundProcessor.getProcessorsWithMetrics()) {
+            Processor processor = processorWithMetric.v1();
+            IngestMetric metric = processorWithMetric.v2();
+            if (processor instanceof CompoundProcessor) {
+                getProcessorMetrics((CompoundProcessor) processor, processorMetrics);
+            } else {
+                //Prefer the conditional's metric since it only includes metrics when the conditional evaluated to true.
+                if (processor instanceof ConditionalProcessor) {
+                    metric = ((ConditionalProcessor) processor).getMetric();
+                }
+                processorMetrics.add(new Tuple<>(processor, metric));
+            }
+        }
+        return processorMetrics;
+    }
+
     private static Pipeline substitutePipeline(String id, ElasticsearchParseException e) {
         String tag = e.getHeaderKeys().contains("processor_tag") ? e.getHeader("processor_tag").get(0) : null;
         String type = e.getHeaderKeys().contains("processor_type") ? e.getHeader("processor_type").get(0) : "unknown";
@@ -371,11 +420,42 @@ public class IngestService implements ClusterStateApplier {
     }
 
     public IngestStats stats() {
+        IngestStats.Builder statsBuilder = new IngestStats.Builder();
+        statsBuilder.addTotalMetrics(totalMetrics);
+        pipelines.forEach((id, pipeline) -> {
+            CompoundProcessor rootProcessor = pipeline.getCompoundProcessor();
+            statsBuilder.addPipelineMetrics(id, pipeline.getMetrics());
+            List<Tuple<Processor, IngestMetric>> processorMetrics = new ArrayList<>();
+            getProcessorMetrics(rootProcessor, processorMetrics);
+            processorMetrics.forEach(t -> {
+                Processor processor = t.v1();
+                IngestMetric processorMetric = t.v2();
+                statsBuilder.addProcessorMetrics(id, getProcessorName(processor), processorMetric);
+            });
+        });
+        return statsBuilder.build();
+    }
 
-        Map<String, IngestStats.Stats> statsPerPipeline =
-            pipelines.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, v -> v.getValue().getMetrics().createStats()));
+    //package private for testing
+    static String getProcessorName(Processor processor){
+        // conditionals are implemented as wrappers around the real processor, so get the real processor for the correct type for the name
+        if(processor instanceof ConditionalProcessor){
+            processor = ((ConditionalProcessor) processor).getProcessor();
+        }
+        StringBuilder sb = new StringBuilder(5);
+        sb.append(processor.getType());
 
-        return new IngestStats(totalMetrics.createStats(), statsPerPipeline);
+        if(processor instanceof PipelineProcessor){
+            String pipelineName = ((PipelineProcessor) processor).getPipelineName();
+            sb.append(":");
+            sb.append(pipelineName);
+        }
+        String tag = processor.getTag();
+        if(tag != null && !tag.isEmpty()){
+            sb.append(":");
+            sb.append(tag);
+        }
+        return sb.toString();
     }
 
     private void innerExecute(IndexRequest indexRequest, Pipeline pipeline, Consumer<IndexRequest> itemDroppedHandler) throws Exception {

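The stat key that getProcessorName composes is the processor type, then `:pipelineName` when the (unwrapped) processor is a pipeline processor, then `:tag` when a tag is set. Illustrative results, with hypothetical pipeline and tag names:

--------------------------------------------------------------
// Illustration of getProcessorName output (names are made up, not from this diff):
//   a "set" processor with no tag                         -> "set"
//   a "set" processor tagged "rename-host"                -> "set:rename-host"
//   a pipeline processor calling "logs-enrich", tag "sub" -> "pipeline:logs-enrich:sub"
String statName = IngestService.getProcessorName(processor); // package-private, same-package callers only
--------------------------------------------------------------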
View File

@@ -19,6 +19,7 @@
 
 package org.elasticsearch.ingest;
 
+import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -27,17 +28,28 @@ import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 public class IngestStats implements Writeable, ToXContentFragment {
     private final Stats totalStats;
-    private final Map<String, Stats> statsPerPipeline;
+    private final List<PipelineStat> pipelineStats;
+    private final Map<String, List<ProcessorStat>> processorStats;
 
-    public IngestStats(Stats totalStats, Map<String, Stats> statsPerPipeline) {
+    /**
+     * @param totalStats - The total stats for Ingest. This is the logically the sum of all pipeline stats,
+     *                   and pipeline stats are logically the sum of the processor stats.
+     * @param pipelineStats - The stats for a given ingest pipeline.
+     * @param processorStats - The per-processor stats for a given pipeline. A map keyed by the pipeline identifier.
+     */
+    public IngestStats(Stats totalStats, List<PipelineStat> pipelineStats, Map<String, List<ProcessorStat>> processorStats) {
         this.totalStats = totalStats;
-        this.statsPerPipeline = statsPerPipeline;
+        this.pipelineStats = pipelineStats;
+        this.processorStats = processorStats;
     }
 
     /**
@@ -46,37 +58,47 @@ public class IngestStats implements Writeable, ToXContentFragment {
     public IngestStats(StreamInput in) throws IOException {
         this.totalStats = new Stats(in);
         int size = in.readVInt();
-        this.statsPerPipeline = new HashMap<>(size);
+        this.pipelineStats = new ArrayList<>(size);
+        this.processorStats = new HashMap<>(size);
         for (int i = 0; i < size; i++) {
-            statsPerPipeline.put(in.readString(), new Stats(in));
+            String pipelineId = in.readString();
+            Stats pipelineStat = new Stats(in);
+            this.pipelineStats.add(new PipelineStat(pipelineId, pipelineStat));
+            if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
+                int processorsSize = in.readVInt();
+                List<ProcessorStat> processorStatsPerPipeline = new ArrayList<>(processorsSize);
+                for (int j = 0; j < processorsSize; j++) {
+                    String processorName = in.readString();
+                    Stats processorStat = new Stats(in);
+                    processorStatsPerPipeline.add(new ProcessorStat(processorName, processorStat));
+                }
+                this.processorStats.put(pipelineId, processorStatsPerPipeline);
+            }
         }
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         totalStats.writeTo(out);
-        out.writeVInt(statsPerPipeline.size());
-        for (Map.Entry<String, Stats> entry : statsPerPipeline.entrySet()) {
-            out.writeString(entry.getKey());
-            entry.getValue().writeTo(out);
+        out.writeVInt(pipelineStats.size());
+        for (PipelineStat pipelineStat : pipelineStats) {
+            out.writeString(pipelineStat.getPipelineId());
+            pipelineStat.getStats().writeTo(out);
+            if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
+                List<ProcessorStat> processorStatsForPipeline = processorStats.get(pipelineStat.getPipelineId());
+                if (processorStatsForPipeline == null) {
+                    out.writeVInt(0);
+                } else {
+                    out.writeVInt(processorStatsForPipeline.size());
+                    for (ProcessorStat processorStat : processorStatsForPipeline) {
+                        out.writeString(processorStat.getName());
+                        processorStat.getStats().writeTo(out);
+                    }
+                }
+            }
         }
     }
 
-    /**
-     * @return The accumulated stats for all pipelines
-     */
-    public Stats getTotalStats() {
-        return totalStats;
-    }
-
-    /**
-     * @return The stats on a per pipeline basis
-     */
-    public Map<String, Stats> getStatsPerPipeline() {
-        return statsPerPipeline;
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject("ingest");
@@ -84,9 +106,21 @@ public class IngestStats implements Writeable, ToXContentFragment {
         totalStats.toXContent(builder, params);
         builder.endObject();
         builder.startObject("pipelines");
-        for (Map.Entry<String, Stats> entry : statsPerPipeline.entrySet()) {
-            builder.startObject(entry.getKey());
-            entry.getValue().toXContent(builder, params);
+        for (PipelineStat pipelineStat : pipelineStats) {
+            builder.startObject(pipelineStat.getPipelineId());
+            pipelineStat.getStats().toXContent(builder, params);
+            List<ProcessorStat> processorStatsForPipeline = processorStats.get(pipelineStat.getPipelineId());
+            builder.startArray("processors");
+            if (processorStatsForPipeline != null) {
+                for (ProcessorStat processorStat : processorStatsForPipeline) {
+                    builder.startObject();
+                    builder.startObject(processorStat.getName());
+                    processorStat.getStats().toXContent(builder, params);
+                    builder.endObject();
+                    builder.endObject();
+                }
+            }
+            builder.endArray();
             builder.endObject();
         }
         builder.endObject();
@@ -94,6 +128,18 @@ public class IngestStats implements Writeable, ToXContentFragment {
         return builder;
     }
 
+    public Stats getTotalStats() {
+        return totalStats;
+    }
+
+    public List<PipelineStat> getPipelineStats() {
+        return pipelineStats;
+    }
+
+    public Map<String, List<ProcessorStat>> getProcessorStats() {
+        return processorStats;
+    }
+
     public static class Stats implements Writeable, ToXContentFragment {
 
         private final long ingestCount;
@@ -134,7 +180,6 @@ public class IngestStats implements Writeable, ToXContentFragment {
         }
 
         /**
-         *
          * @return The total time spent of ingest preprocessing in millis.
         */
        public long getIngestTimeInMillis() {
@@ -164,4 +209,77 @@ public class IngestStats implements Writeable, ToXContentFragment {
             return builder;
         }
     }
+
+    /**
+     * Easy conversion from scoped {@link IngestMetric} objects to a serializable Stats objects
+     */
+    static class Builder {
+        private Stats totalStats;
+        private List<PipelineStat> pipelineStats = new ArrayList<>();
+        private Map<String, List<ProcessorStat>> processorStats = new HashMap<>();
+
+        Builder addTotalMetrics(IngestMetric totalMetric) {
+            this.totalStats = totalMetric.createStats();
+            return this;
+        }
+
+        Builder addPipelineMetrics(String pipelineId, IngestMetric pipelineMetric) {
+            this.pipelineStats.add(new PipelineStat(pipelineId, pipelineMetric.createStats()));
+            return this;
+        }
+
+        Builder addProcessorMetrics(String pipelineId, String processorName, IngestMetric metric) {
+            this.processorStats.computeIfAbsent(pipelineId, k -> new ArrayList<>())
+                .add(new ProcessorStat(processorName, metric.createStats()));
+            return this;
+        }
+
+        IngestStats build() {
+            return new IngestStats(totalStats, Collections.unmodifiableList(pipelineStats),
+                Collections.unmodifiableMap(processorStats));
+        }
+    }
+
+    /**
+     * Container for pipeline stats.
+     */
+    public static class PipelineStat {
+        private final String pipelineId;
+        private final Stats stats;
+
+        public PipelineStat(String pipelineId, Stats stats) {
+            this.pipelineId = pipelineId;
+            this.stats = stats;
+        }
+
+        public String getPipelineId() {
+            return pipelineId;
+        }
+
+        public Stats getStats() {
+            return stats;
+        }
+    }
+
+    /**
+     * Container for processor stats.
+     */
+    public static class ProcessorStat {
+        private final String name;
+        private final Stats stats;
+
+        public ProcessorStat(String name, Stats stats) {
+            this.name = name;
+            this.stats = stats;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public Stats getStats() {
+            return stats;
        }
+    }
 }

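A short sketch of how the new Builder is meant to be driven — the same flow the reworked `IngestService.stats()` uses above. The metric instances and names here are hypothetical placeholders:

--------------------------------------------------------------
// Sketch only; Builder is package-private, so this assumes a caller in org.elasticsearch.ingest.
// totalMetric, pipelineMetric and setMetric are assumed pre-populated IngestMetric instances.
IngestStats.Builder statsBuilder = new IngestStats.Builder();
statsBuilder.addTotalMetrics(totalMetric);                                        // node-wide totals
statsBuilder.addPipelineMetrics("logs-pipeline", pipelineMetric);                 // one entry per pipeline
statsBuilder.addProcessorMetrics("logs-pipeline", "set:rename-host", setMetric);  // per processor, keyed by pipeline
IngestStats stats = statsBuilder.build(); // pipeline list and processor map come back unmodifiable
--------------------------------------------------------------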
View File

@@ -22,11 +22,12 @@ package org.elasticsearch.ingest;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Nullable;
 
-import java.time.Clock;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.function.LongSupplier;
 
 import org.elasticsearch.script.ScriptService;
 
@@ -47,20 +48,21 @@ public final class Pipeline {
     private final Integer version;
     private final CompoundProcessor compoundProcessor;
     private final IngestMetric metrics;
-    private final Clock clock;
+    private final LongSupplier relativeTimeProvider;
 
     public Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor) {
-        this(id, description, version, compoundProcessor, Clock.systemUTC());
+        this(id, description, version, compoundProcessor, System::nanoTime);
     }
 
     //package private for testing
-    Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor, Clock clock) {
+    Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor,
+             LongSupplier relativeTimeProvider) {
         this.id = id;
         this.description = description;
         this.compoundProcessor = compoundProcessor;
         this.version = version;
         this.metrics = new IngestMetric();
-        this.clock = clock;
+        this.relativeTimeProvider = relativeTimeProvider;
     }
 
     public static Pipeline create(String id, Map<String, Object> config,
@@ -89,7 +91,7 @@ public final class Pipeline {
      * Modifies the data of a document to be indexed based on the processor this pipeline holds
      */
     public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
-        long startTimeInMillis = clock.millis();
+        long startTimeInNanos = relativeTimeProvider.getAsLong();
         try {
             metrics.preIngest();
             return compoundProcessor.execute(ingestDocument);
@@ -97,7 +99,7 @@ public final class Pipeline {
             metrics.ingestFailed();
             throw e;
         } finally {
-            long ingestTimeInMillis = clock.millis() - startTimeInMillis;
+            long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos);
             metrics.postIngest(ingestTimeInMillis);
         }
     }

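The switch from `Clock` to a relative nanosecond supplier matters because a wall clock can jump backwards (NTP corrections, manual changes), which could yield negative ingest times; `System.nanoTime()` is monotonic. A self-contained sketch of the measurement idiom:

--------------------------------------------------------------
import java.util.concurrent.TimeUnit;

public class ElapsedTimeSketch {
    public static void main(String[] args) throws InterruptedException {
        long startTimeInNanos = System.nanoTime(); // monotonic; immune to wall-clock jumps
        Thread.sleep(5);                           // stand-in for pipeline execution
        long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeInNanos);
        System.out.println("ingest took ~" + ingestTimeInMillis + "ms");
    }
}
--------------------------------------------------------------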
View File

@@ -53,6 +53,10 @@ public class PipelineProcessor extends AbstractProcessor {
         return TYPE;
     }
 
+    String getPipelineName() {
+        return pipelineName;
+    }
+
     public static final class Factory implements Processor.Factory {
 
         private final IngestService ingestService;
@@ -65,7 +69,7 @@ public class PipelineProcessor extends AbstractProcessor {
         public PipelineProcessor create(Map<String, Processor.Factory> registry, String processorTag,
                                         Map<String, Object> config) throws Exception {
             String pipeline =
-                ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "pipeline");
+                ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "name");
             return new PipelineProcessor(processorTag, pipeline, ingestService);
         }
     }

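With the factory now reading `name`, a pipeline-processor definition is expected to reference the wrapped pipeline under that key; the old `pipeline` key would no longer satisfy the `readStringProperty` lookup. A hedged illustration with a made-up pipeline id:

--------------------------------------------------------------
// Hypothetical processor definition after this change ("logs-enrich" is a made-up pipeline id):
String processorConfig = "{ \"pipeline\": { \"name\": \"logs-enrich\" } }";
// before this change the equivalent definition used: { "pipeline": { "pipeline": "logs-enrich" } }
--------------------------------------------------------------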
View File

@@ -19,11 +19,11 @@
 
 package org.elasticsearch.ingest;
 
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ingest.SimulateProcessorResult;
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Set;
 
 /**
  * Processor to be used within Simulate API to keep track of processors executed in pipeline.
@@ -42,14 +42,46 @@ public final class TrackingResultProcessor implements Processor {
 
     @Override
     public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
+        Processor processor = actualProcessor;
         try {
-            actualProcessor.execute(ingestDocument);
-            processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), new IngestDocument(ingestDocument)));
+            if (processor instanceof ConditionalProcessor) {
+                ConditionalProcessor conditionalProcessor = (ConditionalProcessor) processor;
+                if (conditionalProcessor.evaluate(ingestDocument) == false) {
+                    return ingestDocument;
+                }
+                if (conditionalProcessor.getProcessor() instanceof PipelineProcessor) {
+                    processor = conditionalProcessor.getProcessor();
+                }
+            }
+            if (processor instanceof PipelineProcessor) {
+                PipelineProcessor pipelineProcessor = ((PipelineProcessor) processor);
+                Pipeline pipeline = pipelineProcessor.getPipeline();
+                //runtime check for cycles against a copy of the document. This is needed to properly handle conditionals around pipelines
+                try {
+                    IngestDocument ingestDocumentCopy = new IngestDocument(ingestDocument);
+                    ingestDocumentCopy.executePipeline(pipelineProcessor.getPipeline());
+                } catch (ElasticsearchException elasticsearchException) {
+                    if (elasticsearchException.getCause().getCause() instanceof IllegalStateException) {
+                        throw elasticsearchException;
+                    }
+                    //else do nothing, let the tracking processors throw the exception while recording the path up to the failure
+                } catch (Exception e) {
+                    // do nothing, let the tracking processors throw the exception while recording the path up to the failure
+                }
+                //now that we know that there are no cycles between pipelines, decorate the processors for this pipeline and execute it
+                CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList);
+                Pipeline verbosePipeline = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getVersion(),
+                    verbosePipelineProcessor);
+                ingestDocument.executePipeline(verbosePipeline);
+            } else {
+                processor.execute(ingestDocument);
+                processorResultList.add(new SimulateProcessorResult(processor.getTag(), new IngestDocument(ingestDocument)));
+            }
         } catch (Exception e) {
             if (ignoreFailure) {
-                processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), new IngestDocument(ingestDocument), e));
+                processorResultList.add(new SimulateProcessorResult(processor.getTag(), new IngestDocument(ingestDocument), e));
             } else {
-                processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), e));
+                processorResultList.add(new SimulateProcessorResult(processor.getTag(), e));
             }
             throw e;
         }
@@ -66,35 +98,19 @@ public final class TrackingResultProcessor implements Processor {
         return actualProcessor.getTag();
     }
 
-    public static CompoundProcessor decorate(CompoundProcessor compoundProcessor, List<SimulateProcessorResult> processorResultList,
-                                             Set<PipelineProcessor> pipelinesSeen) {
+    public static CompoundProcessor decorate(CompoundProcessor compoundProcessor, List<SimulateProcessorResult> processorResultList) {
         List<Processor> processors = new ArrayList<>(compoundProcessor.getProcessors().size());
         for (Processor processor : compoundProcessor.getProcessors()) {
-            if (processor instanceof PipelineProcessor) {
-                PipelineProcessor pipelineProcessor = ((PipelineProcessor) processor);
-                if (pipelinesSeen.add(pipelineProcessor) == false) {
-                    throw new IllegalStateException("Cycle detected for pipeline: " + pipelineProcessor.getPipeline().getId());
-                }
-                processors.add(decorate(pipelineProcessor.getPipeline().getCompoundProcessor(), processorResultList, pipelinesSeen));
-                pipelinesSeen.remove(pipelineProcessor);
-            } else if (processor instanceof CompoundProcessor) {
-                processors.add(decorate((CompoundProcessor) processor, processorResultList, pipelinesSeen));
+            if (processor instanceof CompoundProcessor) {
+                processors.add(decorate((CompoundProcessor) processor, processorResultList));
             } else {
                 processors.add(new TrackingResultProcessor(compoundProcessor.isIgnoreFailure(), processor, processorResultList));
             }
         }
         List<Processor> onFailureProcessors = new ArrayList<>(compoundProcessor.getProcessors().size());
         for (Processor processor : compoundProcessor.getOnFailureProcessors()) {
-            if (processor instanceof PipelineProcessor) {
-                PipelineProcessor pipelineProcessor = ((PipelineProcessor) processor);
-                if (pipelinesSeen.add(pipelineProcessor) == false) {
-                    throw new IllegalStateException("Cycle detected for pipeline: " + pipelineProcessor.getPipeline().getId());
-                }
-                onFailureProcessors.add(decorate(pipelineProcessor.getPipeline().getCompoundProcessor(), processorResultList,
-                    pipelinesSeen));
-                pipelinesSeen.remove(pipelineProcessor);
-            } else if (processor instanceof CompoundProcessor) {
-                onFailureProcessors.add(decorate((CompoundProcessor) processor, processorResultList, pipelinesSeen));
+            if (processor instanceof CompoundProcessor) {
+                onFailureProcessors.add(decorate((CompoundProcessor) processor, processorResultList));
             } else {
                 onFailureProcessors.add(new TrackingResultProcessor(compoundProcessor.isIgnoreFailure(), processor, processorResultList));
             }

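The static `pipelinesSeen` bookkeeping had to go because a conditional wrapped around a pipeline processor can legitimately break a would-be cycle at runtime. The replacement runs the nested pipeline against a throwaway copy of the document and treats only a nested `IllegalStateException` (two causes deep) as a genuine cycle. A condensed sketch of that check, using the names from the diff:

--------------------------------------------------------------
// Condensed from the execute(...) change above; not new behavior.
try {
    new IngestDocument(ingestDocument).executePipeline(pipelineProcessor.getPipeline()); // dry run on a copy
} catch (ElasticsearchException e) {
    if (e.getCause().getCause() instanceof IllegalStateException) {
        throw e; // a real pipeline cycle: surface it immediately
    }
    // any other failure is left for the tracking processors to record and rethrow
}
--------------------------------------------------------------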
View File

@@ -133,11 +133,7 @@ public class ExceptionSerializationTests extends ESTestCase {
 
             @Override
             public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
-                Path next = pkgPrefix.resolve(dir.getFileName());
-                if (ignore.contains(next)) {
-                    return FileVisitResult.SKIP_SUBTREE;
-                }
-                pkgPrefix = next;
+                pkgPrefix = pkgPrefix.resolve(dir.getFileName());
                 return FileVisitResult.CONTINUE;
             }

View File

@@ -53,7 +53,6 @@ import static java.util.Collections.emptyMap;
 import static java.util.Collections.emptySet;
 
 public class NodeStatsTests extends ESTestCase {
-
     public void testSerialization() throws IOException {
         NodeStats nodeStats = createNodeStats();
         try (BytesStreamOutput out = new BytesStreamOutput()) {
@@ -271,14 +270,29 @@ public class NodeStatsTests extends ESTestCase {
                 assertEquals(totalStats.getIngestCurrent(), deserializedIngestStats.getTotalStats().getIngestCurrent());
                 assertEquals(totalStats.getIngestFailedCount(), deserializedIngestStats.getTotalStats().getIngestFailedCount());
                 assertEquals(totalStats.getIngestTimeInMillis(), deserializedIngestStats.getTotalStats().getIngestTimeInMillis());
-                assertEquals(ingestStats.getStatsPerPipeline().size(), deserializedIngestStats.getStatsPerPipeline().size());
-                for (Map.Entry<String, IngestStats.Stats> entry : ingestStats.getStatsPerPipeline().entrySet()) {
-                    IngestStats.Stats stats = entry.getValue();
-                    IngestStats.Stats deserializedStats = deserializedIngestStats.getStatsPerPipeline().get(entry.getKey());
-                    assertEquals(stats.getIngestFailedCount(), deserializedStats.getIngestFailedCount());
-                    assertEquals(stats.getIngestTimeInMillis(), deserializedStats.getIngestTimeInMillis());
-                    assertEquals(stats.getIngestCurrent(), deserializedStats.getIngestCurrent());
-                    assertEquals(stats.getIngestCount(), deserializedStats.getIngestCount());
+                assertEquals(ingestStats.getPipelineStats().size(), deserializedIngestStats.getPipelineStats().size());
+                for (IngestStats.PipelineStat pipelineStat : ingestStats.getPipelineStats()) {
+                    String pipelineId = pipelineStat.getPipelineId();
+                    IngestStats.Stats deserializedPipelineStats =
+                        getPipelineStats(deserializedIngestStats.getPipelineStats(), pipelineId);
+                    assertEquals(pipelineStat.getStats().getIngestFailedCount(), deserializedPipelineStats.getIngestFailedCount());
+                    assertEquals(pipelineStat.getStats().getIngestTimeInMillis(), deserializedPipelineStats.getIngestTimeInMillis());
+                    assertEquals(pipelineStat.getStats().getIngestCurrent(), deserializedPipelineStats.getIngestCurrent());
+                    assertEquals(pipelineStat.getStats().getIngestCount(), deserializedPipelineStats.getIngestCount());
+                    List<IngestStats.ProcessorStat> processorStats = ingestStats.getProcessorStats().get(pipelineId);
+                    //intentionally validating identical order
+                    Iterator<IngestStats.ProcessorStat> it = deserializedIngestStats.getProcessorStats().get(pipelineId).iterator();
+                    for (IngestStats.ProcessorStat processorStat : processorStats) {
+                        IngestStats.ProcessorStat deserializedProcessorStat = it.next();
+                        assertEquals(processorStat.getStats().getIngestFailedCount(),
                            deserializedProcessorStat.getStats().getIngestFailedCount());
+                        assertEquals(processorStat.getStats().getIngestTimeInMillis(),
+                            deserializedProcessorStat.getStats().getIngestTimeInMillis());
+                        assertEquals(processorStat.getStats().getIngestCurrent(),
+                            deserializedProcessorStat.getStats().getIngestCurrent());
+                        assertEquals(processorStat.getStats().getIngestCount(), deserializedProcessorStat.getStats().getIngestCount());
+                    }
+                    assertFalse(it.hasNext());
                 }
             }
             AdaptiveSelectionStats adaptiveStats = nodeStats.getAdaptiveSelectionStats();
@@ -429,14 +443,24 @@ public class NodeStatsTests extends ESTestCase {
         if (frequently()) {
             IngestStats.Stats totalStats = new IngestStats.Stats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
                 randomNonNegativeLong());
-            int numStatsPerPipeline = randomIntBetween(0, 10);
-            Map<String, IngestStats.Stats> statsPerPipeline = new HashMap<>();
-            for (int i = 0; i < numStatsPerPipeline; i++) {
-                statsPerPipeline.put(randomAlphaOfLengthBetween(3, 10), new IngestStats.Stats(randomNonNegativeLong(),
-                    randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()));
+            int numPipelines = randomIntBetween(0, 10);
+            int numProcessors = randomIntBetween(0, 10);
+            List<IngestStats.PipelineStat> ingestPipelineStats = new ArrayList<>(numPipelines);
+            Map<String, List<IngestStats.ProcessorStat>> ingestProcessorStats = new HashMap<>(numPipelines);
+            for (int i = 0; i < numPipelines; i++) {
+                String pipelineId = randomAlphaOfLengthBetween(3, 10);
+                ingestPipelineStats.add(new IngestStats.PipelineStat(pipelineId, new IngestStats.Stats
+                    (randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong())));
+
+                List<IngestStats.ProcessorStat> processorPerPipeline = new ArrayList<>(numProcessors);
+                for (int j =0; j < numProcessors;j++) {
+                    IngestStats.Stats processorStats = new IngestStats.Stats
+                        (randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong());
+                    processorPerPipeline.add(new IngestStats.ProcessorStat(randomAlphaOfLengthBetween(3, 10), processorStats));
+                }
+                ingestProcessorStats.put(pipelineId,processorPerPipeline);
             }
-            ingestStats = new IngestStats(totalStats, statsPerPipeline);
+            ingestStats = new IngestStats(totalStats, ingestPipelineStats, ingestProcessorStats);
         }
         AdaptiveSelectionStats adaptiveSelectionStats = null;
         if (frequently()) {
@@ -465,4 +489,8 @@ public class NodeStatsTests extends ESTestCase {
             fsInfo, transportStats, httpStats, allCircuitBreakerStats, scriptStats, discoveryStats,
             ingestStats, adaptiveSelectionStats);
     }
+
+    private IngestStats.Stats getPipelineStats(List<IngestStats.PipelineStat> pipelineStats, String id) {
+        return pipelineStats.stream().filter(p1 -> p1.getPipelineId().equals(id)).findFirst().map(p2 -> p2.getStats()).orElse(null);
+    }
 }

View File

@@ -25,7 +25,6 @@ import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -42,7 +41,7 @@ import static org.joda.time.DateTimeZone.UTC;
 
 public class DateMathExpressionResolverTests extends ESTestCase {
 
-    private final DateMathExpressionResolver expressionResolver = new DateMathExpressionResolver(Settings.EMPTY);
+    private final DateMathExpressionResolver expressionResolver = new DateMathExpressionResolver();
     private final Context context = new Context(
         ClusterState.builder(new ClusterName("_name")).build(), IndicesOptions.strictExpand()
     );
@@ -118,37 +117,6 @@ public class DateMathExpressionResolverTests extends ESTestCase {
         assertThat(result.get(3), equalTo(".logstash-" + DateTimeFormat.forPattern("YYYY.MM").print(new DateTime(context.getStartTime(), UTC).withDayOfMonth(1))));
     }
 
-    public void testExpression_CustomTimeZoneInSetting() throws Exception {
-        DateTimeZone timeZone;
-        int hoursOffset;
-        int minutesOffset = 0;
-        if (randomBoolean()) {
-            hoursOffset = randomIntBetween(-12, 14);
-            timeZone = DateTimeZone.forOffsetHours(hoursOffset);
-        } else {
-            hoursOffset = randomIntBetween(-11, 13);
-            minutesOffset = randomIntBetween(0, 59);
-            timeZone = DateTimeZone.forOffsetHoursMinutes(hoursOffset, minutesOffset);
-        }
-        DateTime now;
-        if (hoursOffset >= 0) {
-            // rounding to next day 00:00
-            now = DateTime.now(UTC).plusHours(hoursOffset).plusMinutes(minutesOffset).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0);
-        } else {
-            // rounding to today 00:00
-            now = DateTime.now(UTC).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0);
-        }
-        Settings settings = Settings.builder()
-                .put("date_math_expression_resolver.default_time_zone", timeZone.getID())
-                .build();
-        DateMathExpressionResolver expressionResolver = new DateMathExpressionResolver(settings);
-        Context context = new Context(this.context.getState(), this.context.getOptions(), now.getMillis());
-        List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{YYYY.MM.dd}}>"));
-        assertThat(results.size(), equalTo(1));
-        logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0));
-        assertThat(results.get(0), equalTo(".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now.withZone(timeZone))));
-    }
-
     public void testExpression_CustomTimeZoneInIndexName() throws Exception {
         DateTimeZone timeZone;
         int hoursOffset;

View File

@@ -51,6 +51,7 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.RoutingFieldMapper;
 import org.elasticsearch.index.shard.IndexEventListener;
 import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.indices.InvalidAliasNameException;
 import org.elasticsearch.test.ESTestCase;
 import org.hamcrest.Matchers;
 import org.mockito.ArgumentCaptor;
@@ -82,7 +83,7 @@ import static org.mockito.Mockito.when;
 public class IndexCreationTaskTests extends ESTestCase {
 
     private final IndicesService indicesService = mock(IndicesService.class);
-    private final AliasValidator aliasValidator = mock(AliasValidator.class);
+    private final AliasValidator aliasValidator = new AliasValidator(Settings.EMPTY);
     private final NamedXContentRegistry xContentRegistry = mock(NamedXContentRegistry.class);
     private final CreateIndexClusterStateUpdateRequest request = mock(CreateIndexClusterStateUpdateRequest.class);
     private final Logger logger = mock(Logger.class);
@@ -149,6 +150,12 @@ public class IndexCreationTaskTests extends ESTestCase {
         assertThat(getMappingsFromResponse(), Matchers.hasKey("mapping1"));
     }
 
+    public void testInvalidAliasName() throws Exception {
+        final String[] invalidAliasNames = new String[] { "-alias1", "+alias2", "_alias3", "a#lias", "al:ias", ".", ".." };
+        setupRequestAlias(new Alias(randomFrom(invalidAliasNames)));
+        expectThrows(InvalidAliasNameException.class, this::executeTask);
+    }
+
     public void testRequestDataHavePriorityOverTemplateData() throws Exception {
         final CompressedXContent tplMapping = createMapping("text");
         final CompressedXContent reqMapping = createMapping("keyword");

View File

@@ -66,6 +66,14 @@ public class MetaDataTests extends ESTestCase {
             assertThat(aliases.size(), equalTo(0));
         }
         {
+            final GetAliasesRequest request;
+            if (randomBoolean()) {
+                request = new GetAliasesRequest();
+            } else {
+                request = new GetAliasesRequest(randomFrom("alias1", "alias2"));
+                // replacing with empty aliases behaves as if aliases were unspecified at request building
+                request.replaceAliases(Strings.EMPTY_ARRAY);
+            }
-            ImmutableOpenMap<String, List<AliasMetaData>> aliases = metaData.findAliases(new GetAliasesRequest(), new String[]{"index"});
+            ImmutableOpenMap<String, List<AliasMetaData>> aliases = metaData.findAliases(request, new String[]{"index"});
             assertThat(aliases.size(), equalTo(1));
             List<AliasMetaData> aliasMetaDataList = aliases.get("index");
@@ -73,12 +81,6 @@ public class MetaDataTests extends ESTestCase {
             assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1"));
             assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2"));
         }
-        {
-            GetAliasesRequest getAliasesRequest = new GetAliasesRequest("alias1");
-            getAliasesRequest.replaceAliases(Strings.EMPTY_ARRAY);
-            ImmutableOpenMap<String, List<AliasMetaData>> aliases = metaData.findAliases(getAliasesRequest, new String[]{"index"});
-            assertThat(aliases.size(), equalTo(0));
-        }
         {
             ImmutableOpenMap<String, List<AliasMetaData>> aliases =
                 metaData.findAliases(new GetAliasesRequest("alias*"), new String[]{"index"});

View File

@@ -156,7 +156,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " }\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
     }
 
     public void testParsingAndToQuery2() throws IOException {
@@ -166,7 +166,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " \"" + GEO_POINT_FIELD_NAME + "\":[-70, 40]\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
     }
 
     public void testParsingAndToQuery3() throws IOException {
@@ -176,7 +176,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " \"" + GEO_POINT_FIELD_NAME + "\":\"40, -70\"\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
     }
 
     public void testParsingAndToQuery4() throws IOException {
@@ -186,7 +186,8 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " \"" + GEO_POINT_FIELD_NAME + "\":\"drn5x1g8cu2y\"\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        GeoPoint geoPoint = GeoPoint.fromGeohash("drn5x1g8cu2y");
+        assertGeoDistanceRangeQuery(query, geoPoint.getLat(), geoPoint.getLon(), 12, DistanceUnit.MILES);
     }
 
     public void testParsingAndToQuery5() throws IOException {
@@ -200,7 +201,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " }\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
     }
 
     public void testParsingAndToQuery6() throws IOException {
@@ -214,7 +215,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " }\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
     }
 
     public void testParsingAndToQuery7() throws IOException {
@@ -227,7 +228,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " }\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 0.012, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.DEFAULT);
     }
 
     public void testParsingAndToQuery8() throws IOException {
@@ -240,7 +241,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " }\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.KILOMETERS);
+        assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.DEFAULT);
     }
 
     public void testParsingAndToQuery9() throws IOException {
@@ -254,7 +255,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " }\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.KILOMETERS);
     }
 
     public void testParsingAndToQuery10() throws IOException {
@@ -268,7 +269,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " }\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.KILOMETERS);
    }
 
     public void testParsingAndToQuery11() throws IOException {
@@ -281,7 +282,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " }\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.KILOMETERS);
     }
 
     public void testParsingAndToQuery12() throws IOException {
@@ -295,13 +296,16 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
                 " }\n" +
                 " }\n" +
                 "}\n";
-        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
+        assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
     }
 
     private void assertGeoDistanceRangeQuery(String query, double lat, double lon, double distance, DistanceUnit distanceUnit)
             throws IOException {
-        parseQuery(query).toQuery(createShardContext());
-        // TODO: what can we check? See https://github.com/elastic/elasticsearch/issues/34043
+        Query parsedQuery = parseQuery(query).toQuery(createShardContext());
+        // The parsedQuery contains IndexOrDocValuesQuery, which wraps LatLonPointDistanceQuery which in turn has default visibility,
+        // so we cannot access its fields directly to check and have to use toString() here instead.
+        assertEquals(parsedQuery.toString(),
+            "mapped_geo_point:" + lat + "," + lon + " +/- " + distanceUnit.toMeters(distance) + " meters");
     }
 
     public void testFromJson() throws IOException {

View File

@@ -27,11 +27,17 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.LongSupplier;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

public class CompoundProcessorTests extends ESTestCase {

    private IngestDocument ingestDocument;

@@ -49,18 +55,29 @@
    }

    public void testSingleProcessor() throws Exception {
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1));
        TestProcessor processor = new TestProcessor(ingestDocument -> {
            assertStats(0, ingestDocument.getFieldValue("compoundProcessor", CompoundProcessor.class), 1, 0, 0, 0);
        });
        CompoundProcessor compoundProcessor = new CompoundProcessor(relativeTimeProvider, processor);
        ingestDocument.setFieldValue("compoundProcessor", compoundProcessor); //ugly hack to assert current count = 1
        assertThat(compoundProcessor.getProcessors().size(), equalTo(1));
        assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor));
        assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true));
        compoundProcessor.execute(ingestDocument);
        verify(relativeTimeProvider, times(2)).getAsLong();
        assertThat(processor.getInvokedCounter(), equalTo(1));
        assertStats(compoundProcessor, 1, 0, 1);
    }

    public void testSingleProcessorWithException() throws Exception {
        TestProcessor processor = new TestProcessor(ingestDocument -> {throw new RuntimeException("error");});
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L);
        CompoundProcessor compoundProcessor = new CompoundProcessor(relativeTimeProvider, processor);
        assertThat(compoundProcessor.getProcessors().size(), equalTo(1));
        assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor));
        assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true));
@@ -71,15 +88,22 @@
            assertThat(e.getRootCause().getMessage(), equalTo("error"));
        }
        assertThat(processor.getInvokedCounter(), equalTo(1));
        assertStats(compoundProcessor, 1, 1, 0);
    }

    public void testIgnoreFailure() throws Exception {
        TestProcessor processor1 = new TestProcessor(ingestDocument -> {throw new RuntimeException("error");});
        TestProcessor processor2 = new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue("field", "value");});
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L);
        CompoundProcessor compoundProcessor =
            new CompoundProcessor(true, Arrays.asList(processor1, processor2), Collections.emptyList(), relativeTimeProvider);
        compoundProcessor.execute(ingestDocument);
        assertThat(processor1.getInvokedCounter(), equalTo(1));
        assertStats(0, compoundProcessor, 0, 1, 1, 0);
        assertThat(processor2.getInvokedCounter(), equalTo(1));
        assertStats(1, compoundProcessor, 0, 1, 0, 0);
        assertThat(ingestDocument.getFieldValue("field", String.class), equalTo("value"));
    }

@@ -93,11 +117,15 @@
            assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id"));
        });
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1));
        CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor1),
            Collections.singletonList(processor2), relativeTimeProvider);
        compoundProcessor.execute(ingestDocument);
        verify(relativeTimeProvider, times(2)).getAsLong();
        assertThat(processor1.getInvokedCounter(), equalTo(1));
        assertStats(compoundProcessor, 1, 1, 1);
        assertThat(processor2.getInvokedCounter(), equalTo(1));
    }

@@ -118,14 +146,17 @@
            assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("second"));
            assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id2"));
        });
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L);
        CompoundProcessor compoundOnFailProcessor = new CompoundProcessor(false, Collections.singletonList(processorToFail),
            Collections.singletonList(lastProcessor), relativeTimeProvider);
        CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor),
            Collections.singletonList(compoundOnFailProcessor), relativeTimeProvider);
        compoundProcessor.execute(ingestDocument);
        assertThat(processorToFail.getInvokedCounter(), equalTo(1));
        assertThat(lastProcessor.getInvokedCounter(), equalTo(1));
        assertStats(compoundProcessor, 1, 1, 0);
    }

    public void testCompoundProcessorExceptionFailWithoutOnFailure() throws Exception {
@@ -137,21 +168,24 @@
            assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first"));
            assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id1"));
        });
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L);
        CompoundProcessor failCompoundProcessor = new CompoundProcessor(relativeTimeProvider, firstProcessor);
        CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(failCompoundProcessor),
            Collections.singletonList(secondProcessor), relativeTimeProvider);
        compoundProcessor.execute(ingestDocument);
        assertThat(firstProcessor.getInvokedCounter(), equalTo(1));
        assertThat(secondProcessor.getInvokedCounter(), equalTo(1));
        assertStats(compoundProcessor, 1, 1, 0);
    }

    public void testCompoundProcessorExceptionFail() throws Exception {
        TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error");});
        TestProcessor failProcessor =
            new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");});
        TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> {
            Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
            assertThat(ingestMetadata.entrySet(), hasSize(3));
@@ -160,21 +194,24 @@
            assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("tag_fail"));
        });
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L);
        CompoundProcessor failCompoundProcessor = new CompoundProcessor(false, Collections.singletonList(firstProcessor),
            Collections.singletonList(failProcessor), relativeTimeProvider);
        CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(failCompoundProcessor),
            Collections.singletonList(secondProcessor), relativeTimeProvider);
        compoundProcessor.execute(ingestDocument);
        assertThat(firstProcessor.getInvokedCounter(), equalTo(1));
        assertThat(secondProcessor.getInvokedCounter(), equalTo(1));
        assertStats(compoundProcessor, 1, 1, 0);
    }

    public void testCompoundProcessorExceptionFailInOnFailure() throws Exception {
        TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error");});
        TestProcessor failProcessor =
            new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");});
        TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> {
            Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
            assertThat(ingestMetadata.entrySet(), hasSize(3));
@@ -183,27 +220,44 @@
            assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("tag_fail"));
        });
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L);
        CompoundProcessor failCompoundProcessor = new CompoundProcessor(false, Collections.singletonList(firstProcessor),
            Collections.singletonList(new CompoundProcessor(relativeTimeProvider, failProcessor)));
        CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(failCompoundProcessor),
            Collections.singletonList(secondProcessor), relativeTimeProvider);
        compoundProcessor.execute(ingestDocument);
        assertThat(firstProcessor.getInvokedCounter(), equalTo(1));
        assertThat(secondProcessor.getInvokedCounter(), equalTo(1));
        assertStats(compoundProcessor, 1, 1, 0);
    }

    public void testBreakOnFailure() throws Exception {
        TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error1");});
        TestProcessor secondProcessor = new TestProcessor("id2", "second", ingestDocument -> {throw new RuntimeException("error2");});
        TestProcessor onFailureProcessor = new TestProcessor("id2", "on_failure", ingestDocument -> {});
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L);
        CompoundProcessor pipeline = new CompoundProcessor(false, Arrays.asList(firstProcessor, secondProcessor),
            Collections.singletonList(onFailureProcessor), relativeTimeProvider);
        pipeline.execute(ingestDocument);
        assertThat(firstProcessor.getInvokedCounter(), equalTo(1));
        assertThat(secondProcessor.getInvokedCounter(), equalTo(0));
        assertThat(onFailureProcessor.getInvokedCounter(), equalTo(1));
        assertStats(pipeline, 1, 1, 0);
    }

    private void assertStats(CompoundProcessor compoundProcessor, long count, long failed, long time) {
        assertStats(0, compoundProcessor, 0L, count, failed, time);
    }

    private void assertStats(int processor, CompoundProcessor compoundProcessor, long current, long count, long failed, long time) {
        IngestStats.Stats stats = compoundProcessor.getProcessorsWithMetrics().get(processor).v2().createStats();
        assertThat(stats.getIngestCount(), equalTo(count));
        assertThat(stats.getIngestCurrent(), equalTo(current));
        assertThat(stats.getIngestFailedCount(), equalTo(failed));
        assertThat(stats.getIngestTimeInMillis(), equalTo(time));
    }
}
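
The Mockito stubs above rely on the production code reading a relative-time supplier once before and once after the wrapped work, so returning `(0L, TimeUnit.MILLISECONDS.toNanos(1))` makes exactly 1ms of ingest time observable. A minimal, self-contained sketch of that pattern, assuming invented names (`SimpleMetric`, `timeCall`), not the Elasticsearch API:

----
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.LongSupplier;
import java.util.function.Supplier;

class SimpleMetric {
    final AtomicLong count = new AtomicLong();
    final AtomicLong timeInMillis = new AtomicLong();

    <T> T timeCall(LongSupplier relativeTimeProvider, Supplier<T> body) {
        long startNanos = relativeTimeProvider.getAsLong();      // first stubbed value: 0L
        try {
            return body.get();
        } finally {
            long elapsed = relativeTimeProvider.getAsLong() - startNanos; // second stubbed value
            count.incrementAndGet();
            timeInMillis.addAndGet(TimeUnit.NANOSECONDS.toMillis(elapsed));
        }
    }

    public static void main(String[] args) {
        SimpleMetric metric = new SimpleMetric();
        LongSupplier stubbed = new LongSupplier() {               // behaves like the Mockito stub
            private final long[] values = {0L, TimeUnit.MILLISECONDS.toNanos(1)};
            private int i;
            @Override public long getAsLong() { return values[Math.min(i++, values.length - 1)]; }
        };
        metric.timeCall(stubbed, () -> "done");
        System.out.println(metric.count.get() + " call(s), " + metric.timeInMillis.get() + " ms"); // 1 call(s), 1 ms
    }
}
----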


@@ -33,12 +33,18 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.LongSupplier;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.Is.is;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class ConditionalProcessorTests extends ESTestCase {

@@ -60,6 +66,8 @@
            new HashMap<>(ScriptModule.CORE_CONTEXTS)
        );
        Map<String, Object> document = new HashMap<>();
        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1), 0L, TimeUnit.MILLISECONDS.toNanos(2));
        ConditionalProcessor processor = new ConditionalProcessor(
            randomAlphaOfLength(10),
            new Script(
@@ -67,7 +75,10 @@
                scriptName, Collections.emptyMap()), scriptService,
            new Processor() {
                @Override
                public IngestDocument execute(final IngestDocument ingestDocument) {
                    if (ingestDocument.hasField("error")) {
                        throw new RuntimeException("error");
                    }
                    ingestDocument.setFieldValue("foo", "bar");
                    return ingestDocument;
                }
@@ -81,20 +92,37 @@
                public String getTag() {
                    return null;
                }
            }, relativeTimeProvider);

        //false condition: the processor is never called and metrics are never incremented
        String falseValue = "falsy";
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
        ingestDocument.setFieldValue(conditionalField, falseValue);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(falseValue));
        assertThat(ingestDocument.getSourceAndMetadata(), not(hasKey("foo")));
        assertStats(processor, 0, 0, 0);

        ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
        ingestDocument.setFieldValue(conditionalField, falseValue);
        ingestDocument.setFieldValue("error", true);
        processor.execute(ingestDocument);
        assertStats(processor, 0, 0, 0);

        //true condition: the processor is always called and metrics are incremented
        ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
        ingestDocument.setFieldValue(conditionalField, trueValue);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(trueValue));
        assertThat(ingestDocument.getSourceAndMetadata().get("foo"), is("bar"));
        assertStats(processor, 1, 0, 1);

        ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
        ingestDocument.setFieldValue(conditionalField, trueValue);
        ingestDocument.setFieldValue("error", true);
        IngestDocument finalIngestDocument = ingestDocument;
        expectThrows(RuntimeException.class, () -> processor.execute(finalIngestDocument));
        assertStats(processor, 2, 1, 2);
    }

    @SuppressWarnings("unchecked")
@@ -141,5 +169,14 @@
        Exception e = expectedException.get();
        assertThat(e, instanceOf(UnsupportedOperationException.class));
        assertEquals("Mutating ingest documents in conditionals is not supported", e.getMessage());
        assertStats(processor, 0, 0, 0);
    }

    private static void assertStats(ConditionalProcessor conditionalProcessor, long count, long failed, long time) {
        IngestStats.Stats stats = conditionalProcessor.getMetric().createStats();
        assertThat(stats.getIngestCount(), equalTo(count));
        assertThat(stats.getIngestCurrent(), equalTo(0L));
        assertThat(stats.getIngestFailedCount(), equalTo(failed));
        assertThat(stats.getIngestTimeInMillis(), greaterThanOrEqualTo(time));
    }
}
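
The test above pins down one behavior: a conditional wrapper only touches its inner processor, and therefore its metrics, when the condition evaluates to true. A rough sketch of that rule with invented names (`ConditionalSketch`), not the ES source:

----
import java.util.function.Predicate;
import java.util.function.UnaryOperator;

class ConditionalSketch<T> {
    long count, failed;
    private final Predicate<T> condition;
    private final UnaryOperator<T> inner;

    ConditionalSketch(Predicate<T> condition, UnaryOperator<T> inner) {
        this.condition = condition;
        this.inner = inner;
    }

    T execute(T doc) {
        if (condition.test(doc) == false) {
            return doc;          // skipped: no metric is incremented
        }
        count++;                 // only "true" evaluations are counted
        try {
            return inner.apply(doc);
        } catch (RuntimeException e) {
            failed++;            // failures inside the guarded processor are counted too
            throw e;
        }
    }

    public static void main(String[] args) {
        ConditionalSketch<String> guarded =
            new ConditionalSketch<>(doc -> doc.contains("go"), doc -> doc + "!");
        guarded.execute("skip");   // condition false: count stays 0
        guarded.execute("go");     // condition true: count becomes 1
        System.out.println(guarded.count + " executed, " + guarded.failed + " failed"); // 1 executed, 0 failed
    }
}
----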


@@ -63,6 +63,7 @@ import java.util.function.Consumer;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@@ -746,16 +747,23 @@
        verify(completionHandler, times(1)).accept(null);
    }

    public void testStats() throws Exception {
        final Processor processor = mock(Processor.class);
        final Processor processorFailure = mock(Processor.class);
        when(processor.getType()).thenReturn("mock");
        when(processor.getTag()).thenReturn("mockTag");
        when(processorFailure.getType()).thenReturn("failure-mock");
        //avoid returning null and dropping the document
        when(processor.execute(any(IngestDocument.class))).thenReturn(RandomDocumentPicks.randomIngestDocument(random()));
        when(processorFailure.execute(any(IngestDocument.class))).thenThrow(new RuntimeException("error"));
        Map<String, Processor.Factory> map = new HashMap<>(2);
        map.put("mock", (factories, tag, config) -> processor);
        map.put("failure-mock", (factories, tag, config) -> processorFailure);
        IngestService ingestService = createWithProcessors(map);

        final IngestStats initialStats = ingestService.stats();
        assertThat(initialStats.getPipelineStats().size(), equalTo(0));
        assertStats(initialStats.getTotalStats(), 0, 0, 0);

        PutPipelineRequest putRequest = new PutPipelineRequest("_id1",
            new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), XContentType.JSON);
@@ -769,7 +777,6 @@
        clusterState = IngestService.innerPut(putRequest, clusterState);
        ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
        @SuppressWarnings("unchecked") final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked") final Consumer<Exception> completionHandler = mock(Consumer.class);
@@ -778,18 +785,33 @@
        indexRequest.source(randomAlphaOfLength(10), randomAlphaOfLength(10));
        ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {});
        final IngestStats afterFirstRequestStats = ingestService.stats();
        assertThat(afterFirstRequestStats.getPipelineStats().size(), equalTo(2));

        afterFirstRequestStats.getProcessorStats().get("_id1").forEach(p -> assertEquals(p.getName(), "mock:mockTag"));
        afterFirstRequestStats.getProcessorStats().get("_id2").forEach(p -> assertEquals(p.getName(), "mock:mockTag"));

        //total
        assertStats(afterFirstRequestStats.getTotalStats(), 1, 0, 0);
        //pipeline
        assertPipelineStats(afterFirstRequestStats.getPipelineStats(), "_id1", 1, 0, 0);
        assertPipelineStats(afterFirstRequestStats.getPipelineStats(), "_id2", 0, 0, 0);
        //processor
        assertProcessorStats(0, afterFirstRequestStats, "_id1", 1, 0, 0);
        assertProcessorStats(0, afterFirstRequestStats, "_id2", 0, 0, 0);

        indexRequest.setPipeline("_id2");
        ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {});
        final IngestStats afterSecondRequestStats = ingestService.stats();
        assertThat(afterSecondRequestStats.getPipelineStats().size(), equalTo(2));
        //total
        assertStats(afterSecondRequestStats.getTotalStats(), 2, 0, 0);
        //pipeline
        assertPipelineStats(afterSecondRequestStats.getPipelineStats(), "_id1", 1, 0, 0);
        assertPipelineStats(afterSecondRequestStats.getPipelineStats(), "_id2", 1, 0, 0);
        //processor
        assertProcessorStats(0, afterSecondRequestStats, "_id1", 1, 0, 0);
        assertProcessorStats(0, afterSecondRequestStats, "_id2", 1, 0, 0);

        //update cluster state and ensure that new stats are added to old stats
        putRequest = new PutPipelineRequest("_id1",
@@ -800,13 +822,66 @@
        indexRequest.setPipeline("_id1");
        ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {});
        final IngestStats afterThirdRequestStats = ingestService.stats();
        assertThat(afterThirdRequestStats.getPipelineStats().size(), equalTo(2));
        //total
        assertStats(afterThirdRequestStats.getTotalStats(), 3, 0, 0);
        //pipeline
        assertPipelineStats(afterThirdRequestStats.getPipelineStats(), "_id1", 2, 0, 0);
        assertPipelineStats(afterThirdRequestStats.getPipelineStats(), "_id2", 1, 0, 0);
        //The number of processors for the "_id1" pipeline changed, so the per-processor metrics are not carried forward. This is
        //due to the parallel arrays used to identify which metrics to carry forward. Without unique ids or semantic equality for
        //each processor, parallel arrays are the best option for carrying metrics forward between pipeline changes. However, in
        //some cases, like this one, it may not be readily obvious why the metrics were not carried forward.
        assertProcessorStats(0, afterThirdRequestStats, "_id1", 1, 0, 0);
        assertProcessorStats(1, afterThirdRequestStats, "_id1", 1, 0, 0);
        assertProcessorStats(0, afterThirdRequestStats, "_id2", 1, 0, 0);

        //test a failure, and that the processor stats are added from the old stats
        putRequest = new PutPipelineRequest("_id1",
            new BytesArray("{\"processors\": [{\"failure-mock\" : { \"on_failure\": [{\"mock\" : {}}]}}, {\"mock\" : {}}]}"),
            XContentType.JSON);
        previousClusterState = clusterState;
        clusterState = IngestService.innerPut(putRequest, clusterState);
        ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
        indexRequest.setPipeline("_id1");
        ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {});
        final IngestStats afterFourthRequestStats = ingestService.stats();
        assertThat(afterFourthRequestStats.getPipelineStats().size(), equalTo(2));
        //total
        assertStats(afterFourthRequestStats.getTotalStats(), 4, 0, 0);
        //pipeline
        assertPipelineStats(afterFourthRequestStats.getPipelineStats(), "_id1", 3, 0, 0);
        assertPipelineStats(afterFourthRequestStats.getPipelineStats(), "_id2", 1, 0, 0);
        //processor
        assertProcessorStats(0, afterFourthRequestStats, "_id1", 1, 1, 0); //not carried forward since type changed
        assertProcessorStats(1, afterFourthRequestStats, "_id1", 2, 0, 0); //carried forward and added from old stats
        assertProcessorStats(0, afterFourthRequestStats, "_id2", 1, 0, 0);
    }
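
The comment in testStats() describes a carry-forward rule: when a pipeline is replaced, old per-processor metrics are walked in lockstep with the new processor list and survive only when the same slot still holds a matching processor. A hedged sketch of that idea, with invented names (`MetricCarryForward`, `ProcessorMetric`), not the IngestService internals:

----
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

final class MetricCarryForward {
    static final class ProcessorMetric {
        final String name;
        final long count;
        ProcessorMetric(String name, long count) { this.name = name; this.count = count; }
    }

    //a metric survives only when the same slot still holds a processor with the same name
    static List<ProcessorMetric> carryForward(List<ProcessorMetric> oldMetrics, List<String> newNames) {
        List<ProcessorMetric> result = new ArrayList<>();
        for (int i = 0; i < newNames.size(); i++) {
            if (i < oldMetrics.size() && oldMetrics.get(i).name.equals(newNames.get(i))) {
                result.add(new ProcessorMetric(newNames.get(i), oldMetrics.get(i).count));
            } else {
                result.add(new ProcessorMetric(newNames.get(i), 0L)); // dropped: slot changed
            }
        }
        return result;
    }

    public static void main(String[] args) {
        List<ProcessorMetric> old = Arrays.asList(new ProcessorMetric("mock", 1), new ProcessorMetric("mock", 1));
        //slot 0 changed from "mock" to "failure-mock", so only slot 1 keeps its count
        for (ProcessorMetric m : carryForward(old, Arrays.asList("failure-mock", "mock"))) {
            System.out.println(m.name + " -> " + m.count); // failure-mock -> 0, mock -> 1
        }
    }
}
----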
    public void testStatName() {
        Processor processor = mock(Processor.class);
        String name = randomAlphaOfLength(10);
        when(processor.getType()).thenReturn(name);
        assertThat(IngestService.getProcessorName(processor), equalTo(name));
        String tag = randomAlphaOfLength(10);
        when(processor.getTag()).thenReturn(tag);
        assertThat(IngestService.getProcessorName(processor), equalTo(name + ":" + tag));

        ConditionalProcessor conditionalProcessor = mock(ConditionalProcessor.class);
        when(conditionalProcessor.getProcessor()).thenReturn(processor);
        assertThat(IngestService.getProcessorName(conditionalProcessor), equalTo(name + ":" + tag));

        PipelineProcessor pipelineProcessor = mock(PipelineProcessor.class);
        String pipelineName = randomAlphaOfLength(10);
        when(pipelineProcessor.getPipelineName()).thenReturn(pipelineName);
        name = PipelineProcessor.TYPE;
        when(pipelineProcessor.getType()).thenReturn(name);
        assertThat(IngestService.getProcessorName(pipelineProcessor), equalTo(name + ":" + pipelineName));
        when(pipelineProcessor.getTag()).thenReturn(tag);
        assertThat(IngestService.getProcessorName(pipelineProcessor), equalTo(name + ":" + pipelineName + ":" + tag));
    }

    public void testExecuteWithDrop() {
        Map<String, Processor.Factory> factories = new HashMap<>();
        factories.put("drop", new DropProcessor.Factory());
@@ -935,4 +1010,23 @@
            return false;
        }
    }

    private void assertProcessorStats(int processor, IngestStats stats, String pipelineId, long count, long failed, long time) {
        assertStats(stats.getProcessorStats().get(pipelineId).get(processor).getStats(), count, failed, time);
    }

    private void assertPipelineStats(List<IngestStats.PipelineStat> pipelineStats, String pipelineId, long count, long failed, long time) {
        assertStats(getPipelineStats(pipelineStats, pipelineId), count, failed, time);
    }

    private void assertStats(IngestStats.Stats stats, long count, long failed, long time) {
        assertThat(stats.getIngestCount(), equalTo(count));
        assertThat(stats.getIngestCurrent(), equalTo(0L));
        assertThat(stats.getIngestFailedCount(), equalTo(failed));
        assertThat(stats.getIngestTimeInMillis(), greaterThanOrEqualTo(time));
    }

    private IngestStats.Stats getPipelineStats(List<IngestStats.PipelineStat> pipelineStats, String id) {
        return pipelineStats.stream().filter(p1 -> p1.getPipelineId().equals(id)).findFirst().map(p2 -> p2.getStats()).orElse(null);
    }
}


@@ -19,44 +19,70 @@
package org.elasticsearch.ingest;

import org.elasticsearch.Version;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class IngestStatsTests extends ESTestCase {

    public void testSerialization() throws IOException {
        IngestStats.Stats totalStats = new IngestStats.Stats(50, 100, 200, 300);
        List<IngestStats.PipelineStat> pipelineStats = createPipelineStats();
        Map<String, List<IngestStats.ProcessorStat>> processorStats = createProcessorStats(pipelineStats);
        IngestStats ingestStats = new IngestStats(totalStats, pipelineStats, processorStats);
        IngestStats serializedStats = serialize(ingestStats);
        assertIngestStats(ingestStats, serializedStats, true);
    }

    public void testReadLegacyStream() throws IOException {
        IngestStats.Stats totalStats = new IngestStats.Stats(50, 100, 200, 300);
        List<IngestStats.PipelineStat> pipelineStats = createPipelineStats();

        //legacy output logic
        BytesStreamOutput out = new BytesStreamOutput();
        out.setVersion(VersionUtils.getPreviousVersion(Version.V_6_5_0));
        totalStats.writeTo(out);
        out.writeVInt(pipelineStats.size());
        for (IngestStats.PipelineStat pipelineStat : pipelineStats) {
            out.writeString(pipelineStat.getPipelineId());
            pipelineStat.getStats().writeTo(out);
        }

        StreamInput in = out.bytes().streamInput();
        in.setVersion(VersionUtils.getPreviousVersion(Version.V_6_5_0));
        IngestStats serializedStats = new IngestStats(in);
        IngestStats expectedStats = new IngestStats(totalStats, pipelineStats, Collections.emptyMap());
        assertIngestStats(expectedStats, serializedStats, false);
    }

    private List<IngestStats.PipelineStat> createPipelineStats() {
        IngestStats.PipelineStat pipeline1Stats = new IngestStats.PipelineStat("pipeline1", new IngestStats.Stats(3, 3, 3, 3));
        IngestStats.PipelineStat pipeline2Stats = new IngestStats.PipelineStat("pipeline2", new IngestStats.Stats(47, 97, 197, 297));
        IngestStats.PipelineStat pipeline3Stats = new IngestStats.PipelineStat("pipeline3", new IngestStats.Stats(0, 0, 0, 0));
        return Stream.of(pipeline1Stats, pipeline2Stats, pipeline3Stats).collect(Collectors.toList());
    }

    private Map<String, List<IngestStats.ProcessorStat>> createProcessorStats(List<IngestStats.PipelineStat> pipelineStats) {
        assert (pipelineStats.size() >= 2);
        IngestStats.ProcessorStat processor1Stat = new IngestStats.ProcessorStat("processor1", new IngestStats.Stats(1, 1, 1, 1));
        IngestStats.ProcessorStat processor2Stat = new IngestStats.ProcessorStat("processor2", new IngestStats.Stats(2, 2, 2, 2));
        IngestStats.ProcessorStat processor3Stat = new IngestStats.ProcessorStat("processor3", new IngestStats.Stats(47, 97, 197, 297));
        //pipeline1 -> processor1,processor2; pipeline2 -> processor3
        return MapBuilder.<String, List<IngestStats.ProcessorStat>>newMapBuilder()
            .put(pipelineStats.get(0).getPipelineId(), Stream.of(processor1Stat, processor2Stat).collect(Collectors.toList()))
            .put(pipelineStats.get(1).getPipelineId(), Collections.singletonList(processor3Stat))
            .map();
    }

    private IngestStats serialize(IngestStats stats) throws IOException {
@@ -65,4 +91,48 @@
        StreamInput in = out.bytes().streamInput();
        return new IngestStats(in);
    }

    private void assertIngestStats(IngestStats ingestStats, IngestStats serializedStats, boolean expectProcessors) {
        assertNotSame(ingestStats, serializedStats);
        assertNotSame(ingestStats.getTotalStats(), serializedStats.getTotalStats());
        assertNotSame(ingestStats.getPipelineStats(), serializedStats.getPipelineStats());
        assertNotSame(ingestStats.getProcessorStats(), serializedStats.getProcessorStats());

        assertStats(ingestStats.getTotalStats(), serializedStats.getTotalStats());
        assertEquals(ingestStats.getPipelineStats().size(), serializedStats.getPipelineStats().size());

        for (IngestStats.PipelineStat serializedPipelineStat : serializedStats.getPipelineStats()) {
            assertStats(getPipelineStats(ingestStats.getPipelineStats(), serializedPipelineStat.getPipelineId()),
                serializedPipelineStat.getStats());
            List<IngestStats.ProcessorStat> serializedProcessorStats =
                serializedStats.getProcessorStats().get(serializedPipelineStat.getPipelineId());
            List<IngestStats.ProcessorStat> processorStat = ingestStats.getProcessorStats().get(serializedPipelineStat.getPipelineId());
            if (expectProcessors) {
                if (processorStat != null) {
                    Iterator<IngestStats.ProcessorStat> it = processorStat.iterator();
                    //intentionally enforcing the identical ordering
                    for (IngestStats.ProcessorStat serializedProcessorStat : serializedProcessorStats) {
                        IngestStats.ProcessorStat ps = it.next();
                        assertEquals(ps.getName(), serializedProcessorStat.getName());
                        assertStats(ps.getStats(), serializedProcessorStat.getStats());
                    }
                    assertFalse(it.hasNext());
                }
            } else {
                //pre 6.5 did not serialize any processor stats
                assertNull(serializedProcessorStats);
            }
        }
    }

    private void assertStats(IngestStats.Stats fromObject, IngestStats.Stats fromStream) {
        assertEquals(fromObject.getIngestCount(), fromStream.getIngestCount());
        assertEquals(fromObject.getIngestFailedCount(), fromStream.getIngestFailedCount());
        assertEquals(fromObject.getIngestTimeInMillis(), fromStream.getIngestTimeInMillis());
        assertEquals(fromObject.getIngestCurrent(), fromStream.getIngestCurrent());
    }

    private IngestStats.Stats getPipelineStats(List<IngestStats.PipelineStat> pipelineStats, String id) {
        return pipelineStats.stream().filter(p1 -> p1.getPipelineId().equals(id)).findFirst().map(p2 -> p2.getStats()).orElse(null);
    }
}
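
testReadLegacyStream() exercises a wire-compatibility pattern: fields introduced in 6.5.0 are written and read only when the stream is marked with a version that knows about them. A simplified sketch of the gating idea, using plain Java streams and invented names (`VersionGatedStats`) rather than the real StreamOutput/StreamInput API:

----
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class VersionGatedStats {
    static final int V_6_5_0 = 6_05_00;

    long count;
    long[] perProcessor = new long[0]; // section introduced in 6.5.0

    void writeTo(DataOutputStream out, int peerVersion) throws IOException {
        out.writeLong(count);
        if (peerVersion >= V_6_5_0) {            // older peers never see the new section
            out.writeInt(perProcessor.length);
            for (long p : perProcessor) {
                out.writeLong(p);
            }
        }
    }

    static VersionGatedStats readFrom(DataInputStream in, int peerVersion) throws IOException {
        VersionGatedStats stats = new VersionGatedStats();
        stats.count = in.readLong();
        if (peerVersion >= V_6_5_0) {
            stats.perProcessor = new long[in.readInt()];
            for (int i = 0; i < stats.perProcessor.length; i++) {
                stats.perProcessor[i] = in.readLong();
            }
        } else {
            stats.perProcessor = new long[0];    // legacy stream: default to empty
        }
        return stats;
    }
}
----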


@@ -21,12 +21,13 @@ package org.elasticsearch.ingest;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.test.ESTestCase;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.LongSupplier;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.mockito.Mockito.mock;
@@ -62,7 +63,7 @@
        when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
        Map<String, Object> config = new HashMap<>();
        config.put("name", pipelineId);
        factory.create(Collections.emptyMap(), null, config).execute(testIngestDocument);
        assertEquals(testIngestDocument, invoked.get());
    }
@@ -72,7 +73,7 @@
        IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
        Map<String, Object> config = new HashMap<>();
        config.put("name", "missingPipelineId");
        IllegalStateException e = expectThrows(
            IllegalStateException.class,
            () -> factory.create(Collections.emptyMap(), null, config).execute(testIngestDocument)
@@ -88,21 +89,21 @@
        IngestService ingestService = mock(IngestService.class);
        IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
        Map<String, Object> outerConfig = new HashMap<>();
        outerConfig.put("name", innerPipelineId);
        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
        Pipeline outer = new Pipeline(
            outerPipelineId, null, null,
            new CompoundProcessor(factory.create(Collections.emptyMap(), null, outerConfig))
        );
        Map<String, Object> innerConfig = new HashMap<>();
        innerConfig.put("name", outerPipelineId);
        Pipeline inner = new Pipeline(
            innerPipelineId, null, null,
            new CompoundProcessor(factory.create(Collections.emptyMap(), null, innerConfig))
        );
        when(ingestService.getPipeline(outerPipelineId)).thenReturn(outer);
        when(ingestService.getPipeline(innerPipelineId)).thenReturn(inner);
        outerConfig.put("name", innerPipelineId);
        ElasticsearchException e = expectThrows(
            ElasticsearchException.class,
            () -> factory.create(Collections.emptyMap(), null, outerConfig).execute(testIngestDocument)
@@ -117,7 +118,7 @@
        IngestService ingestService = mock(IngestService.class);
        IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
        Map<String, Object> outerConfig = new HashMap<>();
        outerConfig.put("name", innerPipelineId);
        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
        Pipeline inner = new Pipeline(
            innerPipelineId, null, null, new CompoundProcessor()
@@ -136,22 +137,22 @@
        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);

        Map<String, Object> pipeline1ProcessorConfig = new HashMap<>();
        pipeline1ProcessorConfig.put("name", pipeline2Id);
        PipelineProcessor pipeline1Processor = factory.create(Collections.emptyMap(), null, pipeline1ProcessorConfig);

        Map<String, Object> pipeline2ProcessorConfig = new HashMap<>();
        pipeline2ProcessorConfig.put("name", pipeline3Id);
        PipelineProcessor pipeline2Processor = factory.create(Collections.emptyMap(), null, pipeline2ProcessorConfig);

        LongSupplier relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L);
        Pipeline pipeline1 = new Pipeline(
            pipeline1Id, null, null, new CompoundProcessor(pipeline1Processor), relativeTimeProvider
        );

        String key1 = randomAlphaOfLength(10);
        relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(3));
        Pipeline pipeline2 = new Pipeline(
            pipeline2Id, null, null, new CompoundProcessor(true,
                Arrays.asList(
@@ -160,15 +161,15 @@
                }),
                pipeline2Processor),
                Collections.emptyList()),
            relativeTimeProvider
        );

        relativeTimeProvider = mock(LongSupplier.class);
        when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(2));
        Pipeline pipeline3 = new Pipeline(
            pipeline3Id, null, null, new CompoundProcessor(
                new TestProcessor(ingestDocument -> {
                    throw new RuntimeException("error");
                })), relativeTimeProvider
        );

        when(ingestService.getPipeline(pipeline1Id)).thenReturn(pipeline1);
        when(ingestService.getPipeline(pipeline2Id)).thenReturn(pipeline2);


@@ -21,17 +21,22 @@ package org.elasticsearch.ingest;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ingest.SimulateProcessorResult;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
import org.mockito.Mockito;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_MESSAGE_FIELD;
import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD;
@@ -39,10 +44,11 @@
import static org.elasticsearch.ingest.TrackingResultProcessor.decorate;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@@ -50,13 +56,11 @@

    private IngestDocument ingestDocument;
    private List<SimulateProcessorResult> resultList;

    @Before
    public void init() {
        ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>());
        resultList = new ArrayList<>();
    }

    public void testActualProcessor() throws Exception {
@@ -76,9 +80,9 @@
    public void testActualCompoundProcessorWithoutOnFailure() throws Exception {
        RuntimeException exception = new RuntimeException("processor failed");
        TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; });
        CompoundProcessor actualProcessor = new CompoundProcessor(testProcessor);
        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);

        try {
            trackingProcessor.execute(ingestDocument);
@@ -97,14 +101,14 @@
    public void testActualCompoundProcessorWithOnFailure() throws Exception {
        RuntimeException exception = new RuntimeException("fail");
        TestProcessor failProcessor = new TestProcessor("fail", "test", ingestDocument -> { throw exception; });
        TestProcessor onFailureProcessor = new TestProcessor("success", "test", ingestDocument -> {});
        CompoundProcessor actualProcessor = new CompoundProcessor(false,
            Arrays.asList(new CompoundProcessor(false,
                Arrays.asList(failProcessor, onFailureProcessor),
                Arrays.asList(onFailureProcessor, failProcessor))),
            Arrays.asList(onFailureProcessor));
        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);
        trackingProcessor.execute(ingestDocument);

        SimulateProcessorResult expectedFailResult = new SimulateProcessorResult(failProcessor.getTag(), ingestDocument);
@@ -139,10 +143,10 @@
    public void testActualCompoundProcessorWithIgnoreFailure() throws Exception {
        RuntimeException exception = new RuntimeException("processor failed");
        TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; });
        CompoundProcessor actualProcessor = new CompoundProcessor(true, Collections.singletonList(testProcessor),
            Collections.emptyList());
        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);

        trackingProcessor.execute(ingestDocument);
@@ -154,11 +158,50 @@
        assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag()));
    }

    public void testActualCompoundProcessorWithFalseConditional() throws Exception {
        String key1 = randomAlphaOfLength(10);
        String key2 = randomAlphaOfLength(10);
        String key3 = randomAlphaOfLength(10);

        String scriptName = "conditionalScript";
        ScriptService scriptService = new ScriptService(Settings.builder().build(), Collections.singletonMap(Script.DEFAULT_SCRIPT_LANG,
            new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> false), Collections.emptyMap())),
            new HashMap<>(ScriptModule.CORE_CONTEXTS)
        );

        CompoundProcessor compoundProcessor = new CompoundProcessor(
            new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); }),
            new ConditionalProcessor(
                randomAlphaOfLength(10),
                new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()), scriptService,
                new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); })),
            new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); }));

        CompoundProcessor trackingProcessor = decorate(compoundProcessor, resultList);
        trackingProcessor.execute(ingestDocument);
        SimulateProcessorResult expectedResult = new SimulateProcessorResult(compoundProcessor.getTag(), ingestDocument);

        //the step for key2 is never executed due to the conditional and is thus not part of the result set
        assertThat(resultList.size(), equalTo(2));

        assertTrue(resultList.get(0).getIngestDocument().hasField(key1));
        assertFalse(resultList.get(0).getIngestDocument().hasField(key2));
        assertFalse(resultList.get(0).getIngestDocument().hasField(key3));

        assertTrue(resultList.get(1).getIngestDocument().hasField(key1));
        assertFalse(resultList.get(1).getIngestDocument().hasField(key2));
        assertTrue(resultList.get(1).getIngestDocument().hasField(key3));

        assertThat(resultList.get(1).getIngestDocument(), equalTo(expectedResult.getIngestDocument()));
        assertThat(resultList.get(1).getFailure(), nullValue());
        assertThat(resultList.get(1).getProcessorTag(), nullValue());
    }
     public void testActualPipelineProcessor() throws Exception {
         String pipelineId = "pipeline1";
         IngestService ingestService = mock(IngestService.class);
         Map<String, Object> pipelineConfig = new HashMap<>();
-        pipelineConfig.put("pipeline", pipelineId);
+        pipelineConfig.put("name", pipelineId);
         PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
         String key1 = randomAlphaOfLength(10);
@@ -176,13 +219,13 @@ public class TrackingResultProcessorTests extends ESTestCase {
         PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig);
         CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
-        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen);
+        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);
         trackingProcessor.execute(ingestDocument);
         SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument);
-        verify(ingestService).getPipeline(pipelineId);
+        verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId);
         assertThat(resultList.size(), equalTo(3));
         assertTrue(resultList.get(0).getIngestDocument().hasField(key1));
@@ -198,13 +241,149 @@ public class TrackingResultProcessorTests extends ESTestCase {
         assertThat(resultList.get(2).getProcessorTag(), nullValue());
     }
+    public void testActualPipelineProcessorWithTrueConditional() throws Exception {
+        String pipelineId1 = "pipeline1";
+        String pipelineId2 = "pipeline2";
+        IngestService ingestService = mock(IngestService.class);
+        Map<String, Object> pipelineConfig0 = new HashMap<>();
+        pipelineConfig0.put("name", pipelineId1);
+        Map<String, Object> pipelineConfig1 = new HashMap<>();
+        pipelineConfig1.put("name", pipelineId1);
+        Map<String, Object> pipelineConfig2 = new HashMap<>();
+        pipelineConfig2.put("name", pipelineId2);
+        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
+
+        String key1 = randomAlphaOfLength(10);
+        String key2 = randomAlphaOfLength(10);
+        String key3 = randomAlphaOfLength(10);
+        String scriptName = "conditionalScript";
+        ScriptService scriptService = new ScriptService(Settings.builder().build(), Collections.singletonMap(Script.DEFAULT_SCRIPT_LANG,
+            new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> true), Collections.emptyMap())),
+            new HashMap<>(ScriptModule.CORE_CONTEXTS)
+        );
+
+        Pipeline pipeline1 = new Pipeline(
+            pipelineId1, null, null, new CompoundProcessor(
+                new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); }),
+                new ConditionalProcessor(
+                    randomAlphaOfLength(10),
+                    new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()), scriptService,
+                    factory.create(Collections.emptyMap(), null, pipelineConfig2)),
+                new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })
+            )
+        );
+
+        Pipeline pipeline2 = new Pipeline(
+            pipelineId2, null, null, new CompoundProcessor(
+                new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); })));
+
+        when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1);
+        when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2);
+
+        PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig0);
+        CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
+
+        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);
+        trackingProcessor.execute(ingestDocument);
+
+        SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument);
+
+        verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId1);
+        verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId2);
+
+        assertThat(resultList.size(), equalTo(3));
+        assertTrue(resultList.get(0).getIngestDocument().hasField(key1));
+        assertFalse(resultList.get(0).getIngestDocument().hasField(key2));
+        assertFalse(resultList.get(0).getIngestDocument().hasField(key3));
+        assertTrue(resultList.get(1).getIngestDocument().hasField(key1));
+        assertTrue(resultList.get(1).getIngestDocument().hasField(key2));
+        assertFalse(resultList.get(1).getIngestDocument().hasField(key3));
+        assertThat(resultList.get(2).getIngestDocument(), equalTo(expectedResult.getIngestDocument()));
+        assertThat(resultList.get(2).getFailure(), nullValue());
+        assertThat(resultList.get(2).getProcessorTag(), nullValue());
+    }
+    public void testActualPipelineProcessorWithFalseConditional() throws Exception {
+        String pipelineId1 = "pipeline1";
+        String pipelineId2 = "pipeline2";
+        IngestService ingestService = mock(IngestService.class);
+        Map<String, Object> pipelineConfig0 = new HashMap<>();
+        pipelineConfig0.put("name", pipelineId1);
+        Map<String, Object> pipelineConfig1 = new HashMap<>();
+        pipelineConfig1.put("name", pipelineId1);
+        Map<String, Object> pipelineConfig2 = new HashMap<>();
+        pipelineConfig2.put("name", pipelineId2);
+        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
+
+        String key1 = randomAlphaOfLength(10);
+        String key2 = randomAlphaOfLength(10);
+        String key3 = randomAlphaOfLength(10);
+        String scriptName = "conditionalScript";
+        ScriptService scriptService = new ScriptService(Settings.builder().build(), Collections.singletonMap(Script.DEFAULT_SCRIPT_LANG,
+            new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> false), Collections.emptyMap())),
+            new HashMap<>(ScriptModule.CORE_CONTEXTS)
+        );
+
+        Pipeline pipeline1 = new Pipeline(
+            pipelineId1, null, null, new CompoundProcessor(
+                new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); }),
+                new ConditionalProcessor(
+                    randomAlphaOfLength(10),
+                    new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()), scriptService,
+                    factory.create(Collections.emptyMap(), null, pipelineConfig2)),
+                new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })
+            )
+        );
+
+        Pipeline pipeline2 = new Pipeline(
+            pipelineId2, null, null, new CompoundProcessor(
+                new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); })));
+
+        when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1);
+        when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2);
+
+        PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig0);
+        CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
+
+        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);
+        trackingProcessor.execute(ingestDocument);
+
+        SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument);
+
+        verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId1);
+        verify(ingestService, Mockito.never()).getPipeline(pipelineId2);
+
+        assertThat(resultList.size(), equalTo(2));
+        assertTrue(resultList.get(0).getIngestDocument().hasField(key1));
+        assertFalse(resultList.get(0).getIngestDocument().hasField(key2));
+        assertFalse(resultList.get(0).getIngestDocument().hasField(key3));
+        assertTrue(resultList.get(1).getIngestDocument().hasField(key1));
+        assertFalse(resultList.get(1).getIngestDocument().hasField(key2));
+        assertTrue(resultList.get(1).getIngestDocument().hasField(key3));
+        assertThat(resultList.get(1).getIngestDocument(), equalTo(expectedResult.getIngestDocument()));
+        assertThat(resultList.get(1).getFailure(), nullValue());
+        assertThat(resultList.get(1).getProcessorTag(), nullValue());
+    }
     public void testActualPipelineProcessorWithHandledFailure() throws Exception {
         RuntimeException exception = new RuntimeException("processor failed");
         String pipelineId = "pipeline1";
         IngestService ingestService = mock(IngestService.class);
         Map<String, Object> pipelineConfig = new HashMap<>();
-        pipelineConfig.put("pipeline", pipelineId);
+        pipelineConfig.put("name", pipelineId);
         PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
         String key1 = randomAlphaOfLength(10);
@@ -226,13 +405,13 @@ public class TrackingResultProcessorTests extends ESTestCase {
         PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig);
         CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
-        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen);
+        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);
         trackingProcessor.execute(ingestDocument);
         SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument);
-        verify(ingestService).getPipeline(pipelineId);
+        verify(ingestService, Mockito.atLeast(2)).getPipeline(pipelineId);
         assertThat(resultList.size(), equalTo(4));
         assertTrue(resultList.get(0).getIngestDocument().hasField(key1));
@@ -253,50 +432,61 @@ public class TrackingResultProcessorTests extends ESTestCase {
     }

     public void testActualPipelineProcessorWithCycle() throws Exception {
-        String pipelineId = "pipeline1";
+        String pipelineId1 = "pipeline1";
+        String pipelineId2 = "pipeline2";
         IngestService ingestService = mock(IngestService.class);
-        Map<String, Object> pipelineConfig = new HashMap<>();
-        pipelineConfig.put("pipeline", pipelineId);
+        Map<String, Object> pipelineConfig0 = new HashMap<>();
+        pipelineConfig0.put("name", pipelineId1);
+        Map<String, Object> pipelineConfig1 = new HashMap<>();
+        pipelineConfig1.put("name", pipelineId1);
+        Map<String, Object> pipelineConfig2 = new HashMap<>();
+        pipelineConfig2.put("name", pipelineId2);
         PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
-        PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig);
-        Pipeline pipeline = new Pipeline(
-            pipelineId, null, null, new CompoundProcessor(pipelineProcessor)
-        );
-        when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
+        Pipeline pipeline1 = new Pipeline(
+            pipelineId1, null, null, new CompoundProcessor(factory.create(Collections.emptyMap(), null, pipelineConfig2)));
+        Pipeline pipeline2 = new Pipeline(
+            pipelineId2, null, null, new CompoundProcessor(factory.create(Collections.emptyMap(), null, pipelineConfig1)));
+        when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1);
+        when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2);
+        PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig0);
         CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor);
-        IllegalStateException exception = expectThrows(IllegalStateException.class,
-            () -> decorate(actualProcessor, resultList, pipelinesSeen));
-        assertThat(exception.getMessage(), equalTo("Cycle detected for pipeline: pipeline1"));
-    }
+        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);
+        ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> trackingProcessor.execute(ingestDocument));
+        assertThat(exception.getCause(), instanceOf(IllegalArgumentException.class));
+        assertThat(exception.getCause().getCause(), instanceOf(IllegalStateException.class));
+        assertThat(exception.getMessage(), containsString("Cycle detected for pipeline: pipeline1"));
+    }

     public void testActualPipelineProcessorRepeatedInvocation() throws Exception {
         String pipelineId = "pipeline1";
         IngestService ingestService = mock(IngestService.class);
         Map<String, Object> pipelineConfig = new HashMap<>();
-        pipelineConfig.put("pipeline", pipelineId);
+        pipelineConfig.put("name", pipelineId);
         PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
         String key1 = randomAlphaOfLength(10);
         PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig);
         Pipeline pipeline = new Pipeline(
             pipelineId, null, null, new CompoundProcessor(
                 new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); }))
         );
         when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
         CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor, pipelineProcessor);
-        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen);
+        CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);
         trackingProcessor.execute(ingestDocument);
         SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument);
-        verify(ingestService, times(2)).getPipeline(pipelineId);
+        verify(ingestService, Mockito.atLeast(2)).getPipeline(pipelineId);
         assertThat(resultList.size(), equalTo(2));
         assertThat(resultList.get(0).getIngestDocument(), not(equalTo(expectedResult.getIngestDocument())));
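Cycle detection between pipeline processors now happens while the document executes rather than when the tracking processor is built, so the failure surfaces as an ElasticsearchException whose cause chain ends in the original IllegalStateException. A minimal, illustrative helper for walking such a cause chain (the class and method names below are not part of the change):

----
// Illustrative only: walk a wrapped failure down to its root cause,
// mirroring the getCause().getCause() assertions in the cycle test above.
final class Causes {
    static Throwable rootCause(Throwable t) {
        Throwable current = t;
        while (current.getCause() != null && current.getCause() != current) {
            current = current.getCause();
        }
        return current;
    }
}
----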
@@ -68,6 +68,7 @@ import java.security.KeyStoreException;
 import java.security.NoSuchAlgorithmException;
 import java.security.cert.CertificateException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -449,7 +450,7 @@ public abstract class ESRestTestCase extends ESTestCase {
         }
     }

-    private void wipeRollupJobs() throws IOException {
+    private void wipeRollupJobs() throws IOException, InterruptedException {
         Response response = adminClient().performRequest(new Request("GET", "/_xpack/rollup/job/_all"));
         Map<String, Object> jobs = entityAsMap(response);
         @SuppressWarnings("unchecked")
@@ -460,6 +461,29 @@ public abstract class ESRestTestCase extends ESTestCase {
             return;
         }

+        for (Map<String, Object> jobConfig : jobConfigs) {
+            @SuppressWarnings("unchecked")
+            String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id");
+            Request request = new Request("POST", "/_xpack/rollup/job/" + jobId + "/_stop");
+            request.addParameter("ignore", "404");
+            logger.debug("stopping rollup job [{}]", jobId);
+            adminClient().performRequest(request);
+        }
+
+        // TODO this is temporary until StopJob API gains the ability to block until stopped
+        awaitBusy(() -> {
+            Request request = new Request("GET", "/_xpack/rollup/job/_all");
+            try {
+                Response jobsResponse = adminClient().performRequest(request);
+                String body = EntityUtils.toString(jobsResponse.getEntity());
+                logger.error(body);
+                // If the body contains any of the non-stopped states, at least one job is not finished yet
+                return Arrays.stream(new String[]{"started", "aborting", "stopping", "indexing"}).noneMatch(body::contains);
+            } catch (IOException e) {
+                return false;
+            }
+        }, 10, TimeUnit.SECONDS);
+
         for (Map<String, Object> jobConfig : jobConfigs) {
             @SuppressWarnings("unchecked")
             String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id");
@@ -50,8 +50,8 @@ public class CcrMultiClusterLicenseIT extends ESRestTestCase {
     public void testAutoFollow() throws Exception {
         assumeFalse("windows is the worst", Constants.WINDOWS);
         if (runningAgainstLeaderCluster == false) {
-            final Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster");
-            request.setJsonEntity("{\"leader_index_patterns\":[\"*\"]}");
+            final Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern");
+            request.setJsonEntity("{\"leader_index_patterns\":[\"*\"], \"leader_cluster\": \"leader_cluster\"}");
             client().performRequest(request);

             // parse the logs and ensure that the auto-coordinator skipped coordination on the leader cluster
@@ -64,7 +64,7 @@ public class CcrMultiClusterLicenseIT extends ESRestTestCase {
             while (it.hasNext()) {
                 final String line = it.next();
                 if (line.matches(".*\\[WARN\\s*\\]\\[o\\.e\\.x\\.c\\.a\\.AutoFollowCoordinator\\s*\\] \\[node-0\\] " +
-                        "failure occurred while fetching cluster state in leader cluster \\[leader_cluster\\]")) {
+                        "failure occurred while fetching cluster state for auto follow pattern \\[test_pattern\\]")) {
                     warn = true;
                     break;
                 }
@@ -146,14 +146,14 @@ public class FollowIndexSecurityIT extends ESRestTestCase {
         String disallowedIndex = "logs-us-20190101";

         {
-            Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster");
-            request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"]}");
+            Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern");
+            request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"], \"leader_cluster\": \"leader_cluster\"}");
             Exception e = expectThrows(ResponseException.class, () -> assertOK(client().performRequest(request)));
             assertThat(e.getMessage(), containsString("insufficient privileges to follow index [logs-*]"));
         }

-        Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster");
-        request.setJsonEntity("{\"leader_index_patterns\": [\"logs-eu-*\"]}");
+        Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern");
+        request.setJsonEntity("{\"leader_index_patterns\": [\"logs-eu-*\"], \"leader_cluster\": \"leader_cluster\"}");
         assertOK(client().performRequest(request));

         try (RestClient leaderClient = buildLeaderClient()) {
@@ -185,7 +185,7 @@ public class FollowIndexSecurityIT extends ESRestTestCase {
         });

         // Cleanup by deleting auto follow pattern and pause following:
-        request = new Request("DELETE", "/_ccr/auto_follow/leader_cluster");
+        request = new Request("DELETE", "/_ccr/auto_follow/test_pattern");
         assertOK(client().performRequest(request));
         pauseFollow(allowedIndex);
     }
@@ -103,8 +103,8 @@ public class FollowIndexIT extends ESRestTestCase {
     public void testAutoFollowPatterns() throws Exception {
         assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster);

-        Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster");
-        request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"]}");
+        Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern");
+        request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"], \"leader_cluster\": \"leader_cluster\"}");
         assertOK(client().performRequest(request));

         try (RestClient leaderClient = buildLeaderClient()) {
@@ -21,29 +21,32 @@
   - do:
       ccr.put_auto_follow_pattern:
-        leader_cluster: local
+        name: my_pattern
         body:
+          leader_cluster: local
           leader_index_patterns: ['logs-*']
           max_concurrent_read_batches: 2
   - is_true: acknowledged

   - do:
       ccr.get_auto_follow_pattern:
-        leader_cluster: local
-  - match: { local.leader_index_patterns: ['logs-*'] }
-  - match: { local.max_concurrent_read_batches: 2 }
+        name: my_pattern
+  - match: { my_pattern.leader_cluster: 'local' }
+  - match: { my_pattern.leader_index_patterns: ['logs-*'] }
+  - match: { my_pattern.max_concurrent_read_batches: 2 }

   - do:
       ccr.get_auto_follow_pattern: {}
-  - match: { local.leader_index_patterns: ['logs-*'] }
-  - match: { local.max_concurrent_read_batches: 2 }
+  - match: { my_pattern.leader_cluster: 'local' }
+  - match: { my_pattern.leader_index_patterns: ['logs-*'] }
+  - match: { my_pattern.max_concurrent_read_batches: 2 }

   - do:
       ccr.delete_auto_follow_pattern:
-        leader_cluster: local
+        name: my_pattern
   - is_true: acknowledged

   - do:
       catch: missing
       ccr.get_auto_follow_pattern:
-        leader_cluster: local
+        name: my_pattern
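Both the YAML test and the Java REST tests above exercise the same API change: an auto-follow pattern is now addressed by its own name in the URL, and the leader cluster alias moves into the request body. A minimal sketch against the low-level REST client (the pattern name my_pattern and alias local are placeholders, not part of the change):

----
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

// Sketch of the renamed auto-follow endpoints; illustrative only.
final class AutoFollowApiSketch {
    static void roundTrip(RestClient client) throws Exception {
        // The pattern name lives in the URL; the leader cluster alias is body content.
        Request put = new Request("PUT", "/_ccr/auto_follow/my_pattern");
        put.setJsonEntity("{\"leader_cluster\": \"local\", \"leader_index_patterns\": [\"logs-*\"]}");
        client.performRequest(put);

        // GET and DELETE are addressed by pattern name as well, not by cluster alias.
        client.performRequest(new Request("GET", "/_ccr/auto_follow/my_pattern"));
        client.performRequest(new Request("DELETE", "/_ccr/auto_follow/my_pattern"));
    }
}
----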
@@ -47,6 +47,7 @@ import java.util.TreeMap;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
 import java.util.function.Function;
+import java.util.stream.Collectors;

 /**
  * A component that runs only on the elected master node and follows leader indices automatically
@@ -105,19 +106,19 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
     synchronized void updateStats(List<AutoFollowResult> results) {
         for (AutoFollowResult result : results) {
             if (result.clusterStateFetchException != null) {
-                recentAutoFollowErrors.put(result.clusterAlias,
+                recentAutoFollowErrors.put(result.autoFollowPatternName,
                     new ElasticsearchException(result.clusterStateFetchException));
                 numberOfFailedRemoteClusterStateRequests++;
-                LOGGER.warn(new ParameterizedMessage("failure occurred while fetching cluster state in leader cluster [{}]",
-                    result.clusterAlias), result.clusterStateFetchException);
+                LOGGER.warn(new ParameterizedMessage("failure occurred while fetching cluster state for auto follow pattern [{}]",
+                    result.autoFollowPatternName), result.clusterStateFetchException);
             } else {
                 for (Map.Entry<Index, Exception> entry : result.autoFollowExecutionResults.entrySet()) {
                     if (entry.getValue() != null) {
                         numberOfFailedIndicesAutoFollowed++;
-                        recentAutoFollowErrors.put(result.clusterAlias + ":" + entry.getKey().getName(),
+                        recentAutoFollowErrors.put(result.autoFollowPatternName + ":" + entry.getKey().getName(),
                             ExceptionsHelper.convertToElastic(entry.getValue()));
-                        LOGGER.warn(new ParameterizedMessage("failure occurred while auto following index [{}] in leader cluster [{}]",
-                            entry.getKey(), result.clusterAlias), entry.getValue());
+                        LOGGER.warn(new ParameterizedMessage("failure occurred while auto following index [{}] for auto follow " +
+                            "pattern [{}]", entry.getKey(), result.autoFollowPatternName), entry.getValue());
                     } else {
                         numberOfSuccessfulIndicesAutoFollowed++;
                     }
@@ -243,34 +244,45 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
         int i = 0;
         for (Map.Entry<String, AutoFollowPattern> entry : autoFollowMetadata.getPatterns().entrySet()) {
             final int slot = i;
-            final String clusterAlias = entry.getKey();
+            final String autoFollowPattenName = entry.getKey();
             final AutoFollowPattern autoFollowPattern = entry.getValue();
+            final String leaderCluster = autoFollowPattern.getLeaderCluster();

-            Map<String, String> headers = autoFollowMetadata.getHeaders().get(clusterAlias);
-            getLeaderClusterState(headers, clusterAlias, (leaderClusterState, e) -> {
+            Map<String, String> headers = autoFollowMetadata.getHeaders().get(autoFollowPattenName);
+            getLeaderClusterState(headers, leaderCluster, (leaderClusterState, e) -> {
                 if (leaderClusterState != null) {
                     assert e == null;
-                    final List<String> followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(clusterAlias);
-                    final List<Index> leaderIndicesToFollow = getLeaderIndicesToFollow(clusterAlias, autoFollowPattern,
+                    final List<String> followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(autoFollowPattenName);
+                    final List<Index> leaderIndicesToFollow = getLeaderIndicesToFollow(leaderCluster, autoFollowPattern,
                         leaderClusterState, followerClusterState, followedIndices);
                     if (leaderIndicesToFollow.isEmpty()) {
-                        finalise(slot, new AutoFollowResult(clusterAlias));
+                        finalise(slot, new AutoFollowResult(autoFollowPattenName));
                     } else {
+                        List<Tuple<String, AutoFollowPattern>> patternsForTheSameLeaderCluster = autoFollowMetadata.getPatterns()
+                            .entrySet().stream()
+                            .filter(item -> autoFollowPattenName.equals(item.getKey()) == false)
+                            .filter(item -> leaderCluster.equals(item.getValue().getLeaderCluster()))
+                            .map(item -> new Tuple<>(item.getKey(), item.getValue()))
+                            .collect(Collectors.toList());
                         Consumer<AutoFollowResult> resultHandler = result -> finalise(slot, result);
-                        checkAutoFollowPattern(clusterAlias, autoFollowPattern, leaderIndicesToFollow, headers, resultHandler);
+                        checkAutoFollowPattern(autoFollowPattenName, leaderCluster, autoFollowPattern, leaderIndicesToFollow, headers,
+                            patternsForTheSameLeaderCluster, resultHandler);
                     }
                 } else {
-                    finalise(slot, new AutoFollowResult(clusterAlias, e));
+                    finalise(slot, new AutoFollowResult(autoFollowPattenName, e));
                 }
             });
             i++;
         }
     }

-    private void checkAutoFollowPattern(String clusterAlias,
+    private void checkAutoFollowPattern(String autoFollowPattenName,
+                                        String clusterAlias,
                                         AutoFollowPattern autoFollowPattern,
                                         List<Index> leaderIndicesToFollow,
                                         Map<String, String> headers,
+                                        List<Tuple<String, AutoFollowPattern>> patternsForTheSameLeaderCluster,
                                         Consumer<AutoFollowResult> resultHandler) {

         final CountDown leaderIndicesCountDown = new CountDown(leaderIndicesToFollow.size());
@@ -278,16 +290,31 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
         for (int i = 0; i < leaderIndicesToFollow.size(); i++) {
             final Index indexToFollow = leaderIndicesToFollow.get(i);
             final int slot = i;
-            followLeaderIndex(clusterAlias, indexToFollow, autoFollowPattern, headers, error -> {
-                results.set(slot, new Tuple<>(indexToFollow, error));
+            List<String> otherMatchingPatterns = patternsForTheSameLeaderCluster.stream()
+                .filter(otherPattern -> otherPattern.v2().match(indexToFollow.getName()))
+                .map(Tuple::v1)
+                .collect(Collectors.toList());
+            if (otherMatchingPatterns.size() != 0) {
+                results.set(slot, new Tuple<>(indexToFollow, new ElasticsearchException("index to follow [" + indexToFollow.getName() +
+                    "] for pattern [" + autoFollowPattenName + "] matches with other patterns " + otherMatchingPatterns + "")));
                 if (leaderIndicesCountDown.countDown()) {
-                    resultHandler.accept(new AutoFollowResult(clusterAlias, results.asList()));
+                    resultHandler.accept(new AutoFollowResult(autoFollowPattenName, results.asList()));
                 }
-            });
+            } else {
+                followLeaderIndex(autoFollowPattenName, clusterAlias, indexToFollow, autoFollowPattern, headers, error -> {
+                    results.set(slot, new Tuple<>(indexToFollow, error));
+                    if (leaderIndicesCountDown.countDown()) {
+                        resultHandler.accept(new AutoFollowResult(autoFollowPattenName, results.asList()));
+                    }
+                });
+            }
         }
     }

-    private void followLeaderIndex(String clusterAlias,
+    private void followLeaderIndex(String autoFollowPattenName,
+                                   String clusterAlias,
                                    Index indexToFollow,
                                    AutoFollowPattern pattern,
                                    Map<String,String> headers,
@@ -313,7 +340,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
             // This function updates the auto follow metadata in the cluster to record that the leader index has been followed:
             // (so that we do not try to follow it in subsequent auto follow runs)
-            Function<ClusterState, ClusterState> function = recordLeaderIndexAsFollowFunction(clusterAlias, indexToFollow);
+            Function<ClusterState, ClusterState> function = recordLeaderIndexAsFollowFunction(autoFollowPattenName, indexToFollow);
             // The coordinator always runs on the elected master node, so we can update cluster state here:
             updateAutoFollowMetadata(function, onResult);
         };
@@ -356,12 +383,12 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
         }
     }

-    static Function<ClusterState, ClusterState> recordLeaderIndexAsFollowFunction(String clusterAlias,
+    static Function<ClusterState, ClusterState> recordLeaderIndexAsFollowFunction(String name,
                                                                                   Index indexToFollow) {
         return currentState -> {
             AutoFollowMetadata currentAutoFollowMetadata = currentState.metaData().custom(AutoFollowMetadata.TYPE);
             Map<String, List<String>> newFollowedIndexUUIDS = new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs());
-            newFollowedIndexUUIDS.compute(clusterAlias, (key, existingUUIDs) -> {
+            newFollowedIndexUUIDS.compute(name, (key, existingUUIDs) -> {
                 assert existingUUIDs != null;
                 List<String> newUUIDs = new ArrayList<>(existingUUIDs);
                 newUUIDs.add(indexToFollow.getUUID());
@@ -405,12 +432,12 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
     static class AutoFollowResult {

-        final String clusterAlias;
+        final String autoFollowPatternName;
         final Exception clusterStateFetchException;
         final Map<Index, Exception> autoFollowExecutionResults;

-        AutoFollowResult(String clusterAlias, List<Tuple<Index, Exception>> results) {
-            this.clusterAlias = clusterAlias;
+        AutoFollowResult(String autoFollowPatternName, List<Tuple<Index, Exception>> results) {
+            this.autoFollowPatternName = autoFollowPatternName;

             Map<Index, Exception> autoFollowExecutionResults = new HashMap<>();
             for (Tuple<Index, Exception> result : results) {
@@ -421,14 +448,14 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
             this.autoFollowExecutionResults = Collections.unmodifiableMap(autoFollowExecutionResults);
         }

-        AutoFollowResult(String clusterAlias, Exception e) {
-            this.clusterAlias = clusterAlias;
+        AutoFollowResult(String autoFollowPatternName, Exception e) {
+            this.autoFollowPatternName = autoFollowPatternName;
             this.clusterStateFetchException = e;
             this.autoFollowExecutionResults = Collections.emptyMap();
         }

-        AutoFollowResult(String clusterAlias) {
-            this(clusterAlias, (Exception) null);
+        AutoFollowResult(String autoFollowPatternName) {
+            this(autoFollowPatternName, (Exception) null);
         }
     }
 }
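The coordinator change above also rejects a leader index that is matched by more than one pattern pointing at the same leader cluster. A standalone sketch of that conflict check, with simplified stand-in types (none of these names come from the change itself):

----
import java.util.ArrayList;
import java.util.List;

// Simplified stand-in for the conflict check: collect the names of all
// *other* patterns on the same leader cluster that also match the index.
final class PatternConflictSketch {

    static final class NamedPattern {
        final String name;
        final String prefix; // "logs-" stands in for the index pattern "logs-*"
        NamedPattern(String name, String prefix) {
            this.name = name;
            this.prefix = prefix;
        }
        boolean matches(String indexName) {
            return indexName.startsWith(prefix);
        }
    }

    static List<String> otherMatches(String currentPattern, String indexName, List<NamedPattern> sameLeaderPatterns) {
        List<String> conflicts = new ArrayList<>();
        for (NamedPattern p : sameLeaderPatterns) {
            if (p.name.equals(currentPattern) == false && p.matches(indexName)) {
                conflicts.add(p.name);
            }
        }
        return conflicts; // non-empty means the index is followed by no pattern and an error is recorded
    }
}
----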
@@ -39,6 +39,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;

 import static org.elasticsearch.action.ValidateActions.addValidationError;
@@ -67,6 +68,8 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
         private TimeValue pollTimeout = TransportResumeFollowAction.DEFAULT_POLL_TIMEOUT;
         private ByteSizeValue maxBatchSize = TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE;

+        private long relativeStartNanos;
+
         public Request(ShardId shardId, String expectedHistoryUUID) {
             super(shardId.getIndexName());
             this.shardId = shardId;
@@ -142,6 +145,9 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
             expectedHistoryUUID = in.readString();
             pollTimeout = in.readTimeValue();
             maxBatchSize = new ByteSizeValue(in);
+
+            // Starting the clock in order to know how much time is spent on fetching operations:
+            relativeStartNanos = System.nanoTime();
         }

         @Override
@@ -220,6 +226,12 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
             return operations;
         }

+        private long tookInMillis;
+
+        public long getTookInMillis() {
+            return tookInMillis;
+        }
+
         Response() {
         }

@@ -228,13 +240,15 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
                  final long globalCheckpoint,
                  final long maxSeqNo,
                  final long maxSeqNoOfUpdatesOrDeletes,
-                 final Translog.Operation[] operations) {
+                 final Translog.Operation[] operations,
+                 final long tookInMillis) {
             this.mappingVersion = mappingVersion;
             this.globalCheckpoint = globalCheckpoint;
             this.maxSeqNo = maxSeqNo;
             this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes;
             this.operations = operations;
+            this.tookInMillis = tookInMillis;
         }

         @Override
@@ -245,6 +259,7 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
             maxSeqNo = in.readZLong();
             maxSeqNoOfUpdatesOrDeletes = in.readZLong();
             operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new);
+            tookInMillis = in.readVLong();
         }

         @Override
@@ -255,6 +270,7 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
             out.writeZLong(maxSeqNo);
             out.writeZLong(maxSeqNoOfUpdatesOrDeletes);
             out.writeArray(Translog.Operation::writeOperation, operations);
+            out.writeVLong(tookInMillis);
         }

         @Override
@@ -266,12 +282,14 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
                 globalCheckpoint == that.globalCheckpoint &&
                 maxSeqNo == that.maxSeqNo &&
                 maxSeqNoOfUpdatesOrDeletes == that.maxSeqNoOfUpdatesOrDeletes &&
-                Arrays.equals(operations, that.operations);
+                Arrays.equals(operations, that.operations) &&
+                tookInMillis == that.tookInMillis;
         }

         @Override
         public int hashCode() {
-            return Objects.hash(mappingVersion, globalCheckpoint, maxSeqNo, maxSeqNoOfUpdatesOrDeletes, Arrays.hashCode(operations));
+            return Objects.hash(mappingVersion, globalCheckpoint, maxSeqNo, maxSeqNoOfUpdatesOrDeletes,
+                Arrays.hashCode(operations), tookInMillis);
         }
     }
@@ -308,7 +326,7 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
                 request.getMaxBatchSize());
             // must capture after snapshotting operations to ensure this MUS is at least the highest MUS of any of these operations.
             final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes();
-            return getResponse(mappingVersion, seqNoStats, maxSeqNoOfUpdatesOrDeletes, operations);
+            return getResponse(mappingVersion, seqNoStats, maxSeqNoOfUpdatesOrDeletes, operations, request.relativeStartNanos);
         }

         @Override
@@ -373,7 +391,8 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
                         clusterService.state().metaData().index(shardId.getIndex()).getMappingVersion();
                     final SeqNoStats latestSeqNoStats = indexShard.seqNoStats();
                     final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes();
-                    listener.onResponse(getResponse(mappingVersion, latestSeqNoStats, maxSeqNoOfUpdatesOrDeletes, EMPTY_OPERATIONS_ARRAY));
+                    listener.onResponse(getResponse(mappingVersion, latestSeqNoStats, maxSeqNoOfUpdatesOrDeletes, EMPTY_OPERATIONS_ARRAY,
+                        request.relativeStartNanos));
                 } catch (final Exception caught) {
                     caught.addSuppressed(e);
                     listener.onFailure(caught);
@@ -459,8 +478,11 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> {
     }

     static Response getResponse(final long mappingVersion, final SeqNoStats seqNoStats,
-                                final long maxSeqNoOfUpdates, final Translog.Operation[] operations) {
-        return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), maxSeqNoOfUpdates, operations);
+                                final long maxSeqNoOfUpdates, final Translog.Operation[] operations, long relativeStartNanos) {
+        long tookInNanos = System.nanoTime() - relativeStartNanos;
+        long tookInMillis = TimeUnit.NANOSECONDS.toMillis(tookInNanos);
+        return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), maxSeqNoOfUpdates,
+            operations, tookInMillis);
     }
 }
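The took-time accounting added here is deliberately one-sided: the clock starts when the request is deserialized on the leader and stops when the response is built, so it measures time spent fetching operations on the leader rather than round-trip network time. A self-contained sketch of the same pattern (the class and method names are illustrative):

----
import java.util.concurrent.TimeUnit;

// Illustrative reduction of the fetch-time accounting above: capture a
// relative start time when the request arrives, report elapsed millis
// when the response is built.
final class TookTimer {
    private final long relativeStartNanos = System.nanoTime();

    long tookInMillis() {
        long tookInNanos = System.nanoTime() - relativeStartNanos;
        return TimeUnit.NANOSECONDS.toMillis(tookInNanos);
    }
}
----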
@@ -71,6 +71,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
     private int numConcurrentReads = 0;
     private int numConcurrentWrites = 0;
     private long currentMappingVersion = 0;
+    private long totalFetchTookTimeMillis = 0;
     private long totalFetchTimeMillis = 0;
     private long numberOfSuccessfulFetches = 0;
     private long numberOfFailedFetches = 0;
@@ -238,6 +239,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
         fetchExceptions.remove(from);
         if (response.getOperations().length > 0) {
             // do not count polls against fetch stats
+            totalFetchTookTimeMillis += response.getTookInMillis();
             totalFetchTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime);
             numberOfSuccessfulFetches++;
             operationsReceived += response.getOperations().length;
@@ -449,6 +451,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
             buffer.size(),
             currentMappingVersion,
             totalFetchTimeMillis,
+            totalFetchTookTimeMillis,
             numberOfSuccessfulFetches,
             numberOfFailedFetches,
             operationsReceived,
@@ -54,7 +54,7 @@ public class TransportDeleteAutoFollowPatternAction extends
     protected void masterOperation(DeleteAutoFollowPatternAction.Request request,
                                    ClusterState state,
                                    ActionListener<AcknowledgedResponse> listener) throws Exception {
-        clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getLeaderCluster(),
+        clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getName(),
             new AckedClusterStateUpdateTask<AcknowledgedResponse>(request, listener) {

                 @Override
@@ -72,23 +72,23 @@ public class TransportDeleteAutoFollowPatternAction extends
     static ClusterState innerDelete(DeleteAutoFollowPatternAction.Request request, ClusterState currentState) {
         AutoFollowMetadata currentAutoFollowMetadata = currentState.metaData().custom(AutoFollowMetadata.TYPE);
         if (currentAutoFollowMetadata == null) {
-            throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found",
-                request.getLeaderCluster());
+            throw new ResourceNotFoundException("auto-follow pattern [{}] is missing",
+                request.getName());
         }
         Map<String, AutoFollowPattern> patterns = currentAutoFollowMetadata.getPatterns();
-        AutoFollowPattern autoFollowPatternToRemove = patterns.get(request.getLeaderCluster());
+        AutoFollowPattern autoFollowPatternToRemove = patterns.get(request.getName());
         if (autoFollowPatternToRemove == null) {
-            throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found",
-                request.getLeaderCluster());
+            throw new ResourceNotFoundException("auto-follow pattern [{}] is missing",
+                request.getName());
         }

         final Map<String, AutoFollowPattern> patternsCopy = new HashMap<>(patterns);
         final Map<String, List<String>> followedLeaderIndexUUIDSCopy =
             new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs());
         final Map<String, Map<String, String>> headers = new HashMap<>(currentAutoFollowMetadata.getHeaders());
-        patternsCopy.remove(request.getLeaderCluster());
-        followedLeaderIndexUUIDSCopy.remove(request.getLeaderCluster());
-        headers.remove(request.getLeaderCluster());
+        patternsCopy.remove(request.getName());
+        followedLeaderIndexUUIDSCopy.remove(request.getName());
+        headers.remove(request.getName());

         AutoFollowMetadata newAutoFollowMetadata = new AutoFollowMetadata(patternsCopy, followedLeaderIndexUUIDSCopy, headers);
         ClusterState.Builder newState = ClusterState.builder(currentState);
@@ -55,7 +55,7 @@ public class TransportGetAutoFollowPatternAction
     protected void masterOperation(GetAutoFollowPatternAction.Request request,
                                    ClusterState state,
                                    ActionListener<GetAutoFollowPatternAction.Response> listener) throws Exception {
-        Map<String, AutoFollowPattern> autoFollowPatterns = getAutoFollowPattern(state.metaData(), request.getLeaderCluster());
+        Map<String, AutoFollowPattern> autoFollowPatterns = getAutoFollowPattern(state.metaData(), request.getName());
         listener.onResponse(new GetAutoFollowPatternAction.Response(autoFollowPatterns));
     }

@@ -64,20 +64,20 @@ public class TransportGetAutoFollowPatternAction
         return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
     }

-    static Map<String, AutoFollowPattern> getAutoFollowPattern(MetaData metaData, String leaderClusterAlias) {
+    static Map<String, AutoFollowPattern> getAutoFollowPattern(MetaData metaData, String name) {
         AutoFollowMetadata autoFollowMetadata = metaData.custom(AutoFollowMetadata.TYPE);
         if (autoFollowMetadata == null) {
-            throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", leaderClusterAlias);
+            throw new ResourceNotFoundException("auto-follow pattern [{}] is missing", name);
         }

-        if (leaderClusterAlias == null) {
+        if (name == null) {
             return autoFollowMetadata.getPatterns();
         }

-        AutoFollowPattern autoFollowPattern = autoFollowMetadata.getPatterns().get(leaderClusterAlias);
+        AutoFollowPattern autoFollowPattern = autoFollowMetadata.getPatterns().get(name);
         if (autoFollowPattern == null) {
-            throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", leaderClusterAlias);
+            throw new ResourceNotFoundException("auto-follow pattern [{}] is missing", name);
         }
-        return Collections.singletonMap(leaderClusterAlias, autoFollowPattern);
+        return Collections.singletonMap(name, autoFollowPattern);
     }
 }
@@ -135,14 +135,14 @@ public class TransportPutAutoFollowPatternAction extends
             headers = new HashMap<>();
         }

-        AutoFollowPattern previousPattern = patterns.get(request.getLeaderCluster());
+        AutoFollowPattern previousPattern = patterns.get(request.getName());
         final List<String> followedIndexUUIDs;
-        if (followedLeaderIndices.containsKey(request.getLeaderCluster())) {
-            followedIndexUUIDs = new ArrayList<>(followedLeaderIndices.get(request.getLeaderCluster()));
+        if (followedLeaderIndices.containsKey(request.getName())) {
+            followedIndexUUIDs = new ArrayList<>(followedLeaderIndices.get(request.getName()));
         } else {
             followedIndexUUIDs = new ArrayList<>();
         }
-        followedLeaderIndices.put(request.getLeaderCluster(), followedIndexUUIDs);
+        followedLeaderIndices.put(request.getName(), followedIndexUUIDs);

         // Mark existing leader indices as already auto followed:
         if (previousPattern != null) {
             markExistingIndicesAsAutoFollowedForNewPatterns(request.getLeaderIndexPatterns(), leaderClusterState.metaData(),
@@ -153,10 +153,11 @@ public class TransportPutAutoFollowPatternAction extends
         }

         if (filteredHeaders != null) {
-            headers.put(request.getLeaderCluster(), filteredHeaders);
+            headers.put(request.getName(), filteredHeaders);
         }

         AutoFollowPattern autoFollowPattern = new AutoFollowPattern(
+            request.getLeaderCluster(),
             request.getLeaderIndexPatterns(),
             request.getFollowIndexNamePattern(),
             request.getMaxBatchOperationCount(),
@@ -166,7 +167,7 @@ public class TransportPutAutoFollowPatternAction extends
             request.getMaxWriteBufferSize(),
             request.getMaxRetryDelay(),
             request.getPollTimeout());
-        patterns.put(request.getLeaderCluster(), autoFollowPattern);
+        patterns.put(request.getName(), autoFollowPattern);
         ClusterState.Builder newState = ClusterState.builder(localState);
         newState.metaData(MetaData.builder(localState.getMetaData())
             .putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, followedLeaderIndices, headers))
@@ -28,6 +28,7 @@ import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.license.LicenseUtils;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -168,6 +169,7 @@ public final class TransportPutFollowAction
         settingsBuilder.put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID());
         settingsBuilder.put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, followIndex);
         settingsBuilder.put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true);
+        settingsBuilder.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true);
         imdBuilder.settings(settingsBuilder);

         // Copy mappings from leader IMD to follow IMD
@@ -240,6 +240,9 @@ public class TransportResumeFollowAction extends HandledTransportAction<ResumeFo
         if (leaderIndex.getSettings().getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false) == false) {
             throw new IllegalArgumentException("leader index [" + leaderIndexName + "] does not have soft deletes enabled");
         }
+        if (followIndex.getSettings().getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false) == false) {
+            throw new IllegalArgumentException("follower index [" + request.getFollowerIndex() + "] does not have soft deletes enabled");
+        }
         if (leaderIndex.getNumberOfShards() != followIndex.getNumberOfShards()) {
             throw new IllegalArgumentException("leader index primary shards [" + leaderIndex.getNumberOfShards() +
                 "] does not match with the number of shards of the follow index [" + followIndex.getNumberOfShards() + "]");
@@ -382,7 +385,6 @@ public class TransportResumeFollowAction extends HandledTransportAction<ResumeFo
         whiteListedSettings.add(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING);
         whiteListedSettings.add(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING);
-        whiteListedSettings.add(IndexSettings.INDEX_SOFT_DELETES_SETTING);
         whiteListedSettings.add(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING);

         WHITE_LISTED_SETTINGS = Collections.unmodifiableSet(whiteListedSettings);

View File

@@ -49,6 +49,9 @@ public final class FollowingEngine extends InternalEngine {
         if (CcrSettings.CCR_FOLLOWING_INDEX_SETTING.get(engineConfig.getIndexSettings().getSettings()) == false) {
             throw new IllegalArgumentException("a following engine can not be constructed for a non-following index");
         }
+        if (engineConfig.getIndexSettings().isSoftDeleteEnabled() == false) {
+            throw new IllegalArgumentException("a following engine requires soft deletes to be enabled");
+        }
         return engineConfig;
     }
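The FollowingEngine now rejects configurations without soft deletes. A standalone sketch of the two guards in order, with illustrative boolean parameters rather than the real engine API:

----------------------------------------------------------------
public class FollowingEngineGuardSketch {
    // Mirrors the two preconditions enforced in the hunk above: the index must be
    // marked as a following index AND must have soft deletes enabled.
    static void validate(boolean isFollowingIndex, boolean softDeletesEnabled) {
        if (isFollowingIndex == false) {
            throw new IllegalArgumentException("a following engine can not be constructed for a non-following index");
        }
        if (softDeletesEnabled == false) {
            throw new IllegalArgumentException("a following engine requires soft deletes to be enabled");
        }
    }

    public static void main(String[] args) {
        validate(true, true);   // passes both guards
        try {
            validate(true, false);
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
----------------------------------------------------------------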

View File

@@ -21,7 +21,7 @@ public class RestDeleteAutoFollowPatternAction extends BaseRestHandler {
     public RestDeleteAutoFollowPatternAction(Settings settings, RestController controller) {
         super(settings);
-        controller.registerHandler(RestRequest.Method.DELETE, "/_ccr/auto_follow/{leader_cluster}", this);
+        controller.registerHandler(RestRequest.Method.DELETE, "/_ccr/auto_follow/{name}", this);
     }
     @Override
@@ -32,7 +32,7 @@ public class RestDeleteAutoFollowPatternAction extends BaseRestHandler {
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         Request request = new Request();
-        request.setLeaderCluster(restRequest.param("leader_cluster"));
+        request.setName(restRequest.param("name"));
         return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel));
     }

View File

@@ -21,7 +21,7 @@ public class RestGetAutoFollowPatternAction extends BaseRestHandler {
     public RestGetAutoFollowPatternAction(Settings settings, RestController controller) {
         super(settings);
-        controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow/{leader_cluster}", this);
+        controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow/{name}", this);
         controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow", this);
     }
@@ -33,7 +33,7 @@ public class RestGetAutoFollowPatternAction extends BaseRestHandler {
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         Request request = new Request();
-        request.setLeaderCluster(restRequest.param("leader_cluster"));
+        request.setName(restRequest.param("name"));
         return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel));
     }

View File

@@ -22,7 +22,7 @@ public class RestPutAutoFollowPatternAction extends BaseRestHandler {
     public RestPutAutoFollowPatternAction(Settings settings, RestController controller) {
         super(settings);
-        controller.registerHandler(RestRequest.Method.PUT, "/_ccr/auto_follow/{leader_cluster}", this);
+        controller.registerHandler(RestRequest.Method.PUT, "/_ccr/auto_follow/{name}", this);
     }
     @Override
@@ -38,7 +38,7 @@ public class RestPutAutoFollowPatternAction extends BaseRestHandler {
     static Request createRequest(RestRequest restRequest) throws IOException {
         try (XContentParser parser = restRequest.contentOrSourceParamParser()) {
-            return Request.fromXContent(parser, restRequest.param("leader_cluster"));
+            return Request.fromXContent(parser, restRequest.param("name"));
         }
     }
 }
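Taken together, the three REST handler changes rename the route parameter from {leader_cluster} to {name}; the leader cluster apparently moves into the request body, given the fromXContent change above and the "[leader_cluster] is missing" validation message seen later in this diff. A hedged sketch of building the corresponding transport requests with the setters these hunks introduce (assumes the x-pack CCR action classes visible elsewhere in this diff are on the classpath):

----------------------------------------------------------------
import java.util.Collections;
import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction;
import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction;

public class AutoFollowRequestSketch {
    public static void main(String[] args) {
        PutAutoFollowPatternAction.Request put = new PutAutoFollowPatternAction.Request();
        put.setName("my-pattern");                       // routed via PUT /_ccr/auto_follow/my-pattern
        put.setLeaderCluster("leader_cluster");          // no longer part of the URL
        put.setLeaderIndexPatterns(Collections.singletonList("logs-*"));

        DeleteAutoFollowPatternAction.Request delete = new DeleteAutoFollowPatternAction.Request();
        delete.setName("my-pattern");                    // DELETE /_ccr/auto_follow/my-pattern
    }
}
----------------------------------------------------------------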

View File

@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.xpack.ccr;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -26,6 +27,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.notNullValue;
 public class AutoFollowIT extends CcrIntegTestCase {
@@ -45,7 +47,12 @@ public class AutoFollowIT extends CcrIntegTestCase {
         createLeaderIndex("logs-201812", leaderIndexSettings);
         // Enabling auto following:
-        putAutoFollowPatterns("logs-*", "transactions-*");
+        if (randomBoolean()) {
+            putAutoFollowPatterns("my-pattern", new String[] {"logs-*", "transactions-*"});
+        } else {
+            putAutoFollowPatterns("my-pattern1", new String[] {"logs-*"});
+            putAutoFollowPatterns("my-pattern2", new String[] {"transactions-*"});
+        }
         createLeaderIndex("metrics-201901", leaderIndexSettings);
@@ -76,7 +83,7 @@ public class AutoFollowIT extends CcrIntegTestCase {
             .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0)
             .build();
-        putAutoFollowPatterns("logs-*");
+        putAutoFollowPatterns("my-pattern", new String[] {"logs-*"});
         int numIndices = randomIntBetween(4, 32);
         for (int i = 0; i < numIndices; i++) {
             createLeaderIndex("logs-" + i, leaderIndexSettings);
@@ -90,7 +97,7 @@ public class AutoFollowIT extends CcrIntegTestCase {
         deleteAutoFollowPatternSetting();
         createLeaderIndex("logs-does-not-count", leaderIndexSettings);
-        putAutoFollowPatterns("logs-*");
+        putAutoFollowPatterns("my-pattern", new String[] {"logs-*"});
         int i = numIndices;
         numIndices = numIndices + randomIntBetween(4, 32);
         for (; i < numIndices; i++) {
@@ -113,6 +120,7 @@ public class AutoFollowIT extends CcrIntegTestCase {
         // Enabling auto following:
         PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
+        request.setName("my-pattern");
         request.setLeaderCluster("leader_cluster");
         request.setLeaderIndexPatterns(Collections.singletonList("logs-*"));
         // Need to set this, because following an index in the same cluster
@@ -173,8 +181,53 @@ public class AutoFollowIT extends CcrIntegTestCase {
         });
     }
-    private void putAutoFollowPatterns(String... patterns) {
+    public void testConflictingPatterns() throws Exception {
+        Settings leaderIndexSettings = Settings.builder()
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
+            .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1)
+            .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0)
+            .build();
+        // Enabling auto following:
+        putAutoFollowPatterns("my-pattern1", new String[] {"logs-*"});
+        putAutoFollowPatterns("my-pattern2", new String[] {"logs-2018*"});
+        createLeaderIndex("logs-201701", leaderIndexSettings);
+        assertBusy(() -> {
+            AutoFollowStats autoFollowStats = getAutoFollowStats();
+            assertThat(autoFollowStats.getNumberOfSuccessfulFollowIndices(), equalTo(1L));
+            assertThat(autoFollowStats.getNumberOfFailedFollowIndices(), equalTo(0L));
+            assertThat(autoFollowStats.getNumberOfFailedRemoteClusterStateRequests(), equalTo(0L));
+        });
+        IndicesExistsRequest request = new IndicesExistsRequest("copy-logs-201701");
+        assertTrue(followerClient().admin().indices().exists(request).actionGet().isExists());
+        createLeaderIndex("logs-201801", leaderIndexSettings);
+        assertBusy(() -> {
+            AutoFollowStats autoFollowStats = getAutoFollowStats();
+            assertThat(autoFollowStats.getNumberOfSuccessfulFollowIndices(), equalTo(1L));
+            assertThat(autoFollowStats.getNumberOfFailedFollowIndices(), greaterThanOrEqualTo(1L));
+            assertThat(autoFollowStats.getNumberOfFailedRemoteClusterStateRequests(), equalTo(0L));
+            assertThat(autoFollowStats.getRecentAutoFollowErrors().size(), equalTo(2));
+            ElasticsearchException autoFollowError1 = autoFollowStats.getRecentAutoFollowErrors().get("my-pattern1:logs-201801");
+            assertThat(autoFollowError1, notNullValue());
+            assertThat(autoFollowError1.getRootCause().getMessage(), equalTo("index to follow [logs-201801] for pattern [my-pattern1] " +
+                "matches with other patterns [my-pattern2]"));
+            ElasticsearchException autoFollowError2 = autoFollowStats.getRecentAutoFollowErrors().get("my-pattern2:logs-201801");
+            assertThat(autoFollowError2, notNullValue());
+            assertThat(autoFollowError2.getRootCause().getMessage(), equalTo("index to follow [logs-201801] for pattern [my-pattern2] " +
+                "matches with other patterns [my-pattern1]"));
+        });
+        request = new IndicesExistsRequest("copy-logs-201801");
+        assertFalse(followerClient().admin().indices().exists(request).actionGet().isExists());
+    }
+    private void putAutoFollowPatterns(String name, String[] patterns) {
         PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
+        request.setName(name);
         request.setLeaderCluster("leader_cluster");
         request.setLeaderIndexPatterns(Arrays.asList(patterns));
         // Need to set this, because following an index in the same cluster
@@ -184,7 +237,7 @@ public class AutoFollowIT extends CcrIntegTestCase {
     private void deleteAutoFollowPatternSetting() {
         DeleteAutoFollowPatternAction.Request request = new DeleteAutoFollowPatternAction.Request();
-        request.setLeaderCluster("leader_cluster");
+        request.setName("my-pattern");
         assertTrue(followerClient().execute(DeleteAutoFollowPatternAction.INSTANCE, request).actionGet().isAcknowledged());
     }
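testConflictingPatterns asserts that an index matching more than one auto-follow pattern is skipped, and that an error naming the other matching patterns is recorded per pattern. A standalone sketch of that conflict check, using naive trailing-wildcard matching purely for illustration:

----------------------------------------------------------------
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ConflictingPatternSketch {
    // Naive glob: only a single trailing '*' is supported here.
    static boolean matches(String pattern, String index) {
        return pattern.endsWith("*")
            ? index.startsWith(pattern.substring(0, pattern.length() - 1))
            : pattern.equals(index);
    }

    public static void main(String[] args) {
        Map<String, String> patterns = new LinkedHashMap<>();
        patterns.put("my-pattern1", "logs-*");
        patterns.put("my-pattern2", "logs-2018*");

        String index = "logs-201801"; // matches both patterns -> conflict
        List<String> matching = new ArrayList<>();
        patterns.forEach((name, p) -> { if (matches(p, index)) matching.add(name); });

        for (String name : matching) {
            if (matching.size() > 1) {
                List<String> others = new ArrayList<>(matching);
                others.remove(name);
                // Mirrors the message format asserted in the test above.
                System.out.println("index to follow [" + index + "] for pattern [" + name
                    + "] matches with other patterns " + others);
            }
        }
    }
}
----------------------------------------------------------------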

View File

@@ -41,6 +41,7 @@ public class AutoFollowMetadataTests extends AbstractSerializingTestCase<AutoFol
         for (int i = 0; i < numEntries; i++) {
             List<String> leaderPatterns = Arrays.asList(generateRandomStringArray(4, 4, false));
             AutoFollowMetadata.AutoFollowPattern autoFollowPattern = new AutoFollowMetadata.AutoFollowPattern(
+                randomAlphaOfLength(4),
                 leaderPatterns,
                 randomAlphaOfLength(4),
                 randomIntBetween(0, Integer.MAX_VALUE),

View File

@@ -118,6 +118,7 @@ public class CcrLicenseIT extends CcrSingleNodeTestCase {
     public void testThatPutAutoFollowPatternsIsUnavailableWithNonCompliantLicense() throws InterruptedException {
         final CountDownLatch latch = new CountDownLatch(1);
         final PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
+        request.setName("name");
         request.setLeaderCluster("leader");
         request.setLeaderIndexPatterns(Collections.singletonList("*"));
         client().execute(
@@ -147,8 +148,8 @@ public class CcrLicenseIT extends CcrSingleNodeTestCase {
                 @Override
                 public ClusterState execute(ClusterState currentState) throws Exception {
-                    AutoFollowPattern autoFollowPattern =
-                        new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
+                    AutoFollowPattern autoFollowPattern = new AutoFollowPattern("test_alias", Collections.singletonList("logs-*"),
+                        null, null, null, null, null, null, null, null);
                     AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(
                         Collections.singletonMap("test_alias", autoFollowPattern),
                         Collections.emptyMap(),

View File

@@ -683,6 +683,7 @@ public class IndexFollowingIT extends CcrIntegTestCase {
             () -> followerClient().execute(ResumeFollowAction.INSTANCE, followRequest.getFollowRequest()).actionGet());
         assertThat(e.getMessage(), equalTo("unknown cluster alias [another_cluster]"));
         PutAutoFollowPatternAction.Request putAutoFollowRequest = new PutAutoFollowPatternAction.Request();
+        putAutoFollowRequest.setName("name");
         putAutoFollowRequest.setLeaderCluster("another_cluster");
         putAutoFollowRequest.setLeaderIndexPatterns(Collections.singletonList("logs-*"));
         e = expectThrows(IllegalArgumentException.class,

View File

@@ -56,7 +56,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
             .build();
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
+            new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
         Map<String, AutoFollowPattern> patterns = new HashMap<>();
         patterns.put("remote", autoFollowPattern);
         Map<String, List<String>> followedLeaderIndexUUIDS = new HashMap<>();
@@ -120,7 +120,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
         when(client.getRemoteClusterClient(anyString())).thenReturn(client);
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
+            new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
         Map<String, AutoFollowPattern> patterns = new HashMap<>();
         patterns.put("remote", autoFollowPattern);
         Map<String, List<String>> followedLeaderIndexUUIDS = new HashMap<>();
@@ -178,7 +178,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
             .build();
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
+            new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
         Map<String, AutoFollowPattern> patterns = new HashMap<>();
         patterns.put("remote", autoFollowPattern);
         Map<String, List<String>> followedLeaderIndexUUIDS = new HashMap<>();
@@ -241,7 +241,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
             .build();
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
+            new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
         Map<String, AutoFollowPattern> patterns = new HashMap<>();
         patterns.put("remote", autoFollowPattern);
         Map<String, List<String>> followedLeaderIndexUUIDS = new HashMap<>();
@@ -295,7 +295,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
     public void testGetLeaderIndicesToFollow() {
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null);
+            new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null);
         Map<String, Map<String, String>> headers = new HashMap<>();
         ClusterState followerState = ClusterState.builder(new ClusterName("remote"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
@@ -342,15 +342,15 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
     }
     public void testGetFollowerIndexName() {
-        AutoFollowPattern autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null,
-            null, null, null, null, null, null);
+        AutoFollowPattern autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), null, null,
+            null, null, null, null, null, null);
         assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("metrics-0"));
-        autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), "eu-metrics-0", null, null,
-            null, null, null, null, null);
+        autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), "eu-metrics-0", null, null,
+            null, null, null, null, null);
         assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0"));
-        autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), "eu-{{leader_index}}", null,
-            null, null, null, null, null, null);
+        autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), "eu-{{leader_index}}", null,
+            null, null, null, null, null, null);
         assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0"));
     }
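testGetFollowerIndexName pins down the follower-name resolution: no follow pattern reuses the leader index name, a literal pattern is used verbatim, and the {{leader_index}} placeholder is substituted. A standalone sketch consistent with those assertions (the real AutoFollower.getFollowerIndexName may differ in details):

----------------------------------------------------------------
public class FollowerIndexNameSketch {
    static String getFollowerIndexName(String followIndexNamePattern, String leaderIndex) {
        if (followIndexNamePattern == null) {
            return leaderIndex; // no pattern: follower reuses the leader's name
        }
        // a literal pattern falls through unchanged; {{leader_index}} is expanded
        return followIndexNamePattern.replace("{{leader_index}}", leaderIndex);
    }

    public static void main(String[] args) {
        System.out.println(getFollowerIndexName(null, "metrics-0"));                  // metrics-0
        System.out.println(getFollowerIndexName("eu-metrics-0", "metrics-0"));        // eu-metrics-0
        System.out.println(getFollowerIndexName("eu-{{leader_index}}", "metrics-0")); // eu-metrics-0
    }
}
----------------------------------------------------------------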

View File

@@ -18,7 +18,7 @@ public class DeleteAutoFollowPatternRequestTests extends AbstractStreamableTestC
     @Override
     protected DeleteAutoFollowPatternAction.Request createTestInstance() {
         DeleteAutoFollowPatternAction.Request request = new DeleteAutoFollowPatternAction.Request();
-        request.setLeaderCluster(randomAlphaOfLength(4));
+        request.setName(randomAlphaOfLength(4));
         return request;
     }
 }

View File

@@ -20,7 +20,7 @@ public class GetAutoFollowPatternRequestTests extends AbstractWireSerializingTes
     protected GetAutoFollowPatternAction.Request createTestInstance() {
         GetAutoFollowPatternAction.Request request = new GetAutoFollowPatternAction.Request();
         if (randomBoolean()) {
-            request.setLeaderCluster(randomAlphaOfLength(4));
+            request.setName(randomAlphaOfLength(4));
         }
         return request;
     }

View File

@@ -29,6 +29,7 @@ public class GetAutoFollowPatternResponseTests extends AbstractStreamableTestCas
         Map<String, AutoFollowPattern> patterns = new HashMap<>(numPatterns);
         for (int i = 0; i < numPatterns; i++) {
             AutoFollowPattern autoFollowPattern = new AutoFollowPattern(
+                "remote",
                 Collections.singletonList(randomAlphaOfLength(4)),
                 randomAlphaOfLength(4),
                 randomIntBetween(0, Integer.MAX_VALUE),

View File

@@ -41,6 +41,7 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent
     @Override
     protected PutAutoFollowPatternAction.Request createTestInstance() {
         PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
+        request.setName(randomAlphaOfLength(4));
         request.setLeaderCluster(randomAlphaOfLength(4));
         request.setLeaderIndexPatterns(Arrays.asList(generateRandomStringArray(4, 4, false)));
         if (randomBoolean()) {
@@ -74,6 +75,11 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent
         PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
         ActionRequestValidationException validationException = request.validate();
         assertThat(validationException, notNullValue());
+        assertThat(validationException.getMessage(), containsString("[name] is missing"));
+        request.setName("name");
+        validationException = request.validate();
+        assertThat(validationException, notNullValue());
         assertThat(validationException.getMessage(), containsString("[leader_cluster] is missing"));
         request.setLeaderCluster("_alias");

View File

@@ -26,7 +26,8 @@ public class ShardChangesResponseTests extends AbstractStreamableTestCase<ShardC
             leaderGlobalCheckpoint,
             leaderMaxSeqNo,
             maxSeqNoOfUpdatesOrDeletes,
-            operations
+            operations,
+            randomNonNegativeLong()
         );
     }

View File

@@ -158,7 +158,7 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase {
                 final long globalCheckpoint = tracker.getCheckpoint();
                 final long maxSeqNo = tracker.getMaxSeqNo();
                 handler.accept(new ShardChangesAction.Response(
-                    0L, globalCheckpoint, maxSeqNo, randomNonNegativeLong(), new Translog.Operation[0]));
+                    0L, globalCheckpoint, maxSeqNo, randomNonNegativeLong(), new Translog.Operation[0], 1L));
             }
         };
         threadPool.generic().execute(task);
@@ -233,7 +233,8 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase {
                         nextGlobalCheckPoint,
                         nextGlobalCheckPoint,
                         randomNonNegativeLong(),
-                        ops.toArray(EMPTY))
+                        ops.toArray(EMPTY),
+                        randomNonNegativeLong())
                 )
             );
             responses.put(prevGlobalCheckpoint, item);
@@ -256,7 +257,8 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase {
                         prevGlobalCheckpoint,
                         prevGlobalCheckpoint,
                         randomNonNegativeLong(),
-                        EMPTY
+                        EMPTY,
+                        randomNonNegativeLong()
                     );
                     item.add(new TestResponse(null, mappingVersion, response));
                 }
@@ -273,7 +275,8 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase {
                     localLeaderGCP,
                     localLeaderGCP,
                     randomNonNegativeLong(),
-                    ops.toArray(EMPTY)
+                    ops.toArray(EMPTY),
+                    randomNonNegativeLong()
                 );
                 item.add(new TestResponse(null, mappingVersion, response));
                 responses.put(fromSeqNo, Collections.unmodifiableList(item));
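These test hunks, and the related ones below, append one more long argument to every ShardChangesAction.Response constructor call; the field's actual name is not visible in this diff. The sketch below labels it tookInMillis purely as an assumption, to show the shape of the change with stand-in types:

----------------------------------------------------------------
public class ShardChangesResponseSketch {
    // Stand-in for ShardChangesAction.Response; parameter names are illustrative.
    static final class Response {
        final long mappingVersion;
        final long globalCheckpoint;
        final long maxSeqNo;
        final long maxSeqNoOfUpdatesOrDeletes;
        final String[] operations;   // stand-in for Translog.Operation[]
        final long tookInMillis;     // assumed name for the newly appended field

        Response(long mappingVersion, long globalCheckpoint, long maxSeqNo,
                 long maxSeqNoOfUpdatesOrDeletes, String[] operations, long tookInMillis) {
            this.mappingVersion = mappingVersion;
            this.globalCheckpoint = globalCheckpoint;
            this.maxSeqNo = maxSeqNo;
            this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes;
            this.operations = operations;
            this.tookInMillis = tookInMillis;
        }
    }

    public static void main(String[] args) {
        // Mirrors a call shape from the tests: four longs, the operations, then the new long.
        Response r = new Response(0L, 63L, 63L, 100L, new String[0], 1L);
        System.out.println(r.tookInMillis);
    }
}
----------------------------------------------------------------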

View File

@@ -56,6 +56,7 @@ public class ShardFollowNodeTaskStatusTests extends AbstractSerializingTestCase<
             randomNonNegativeLong(),
             randomNonNegativeLong(),
             randomNonNegativeLong(),
+            randomNonNegativeLong(),
             randomReadExceptions(),
             randomLong(),
             randomBoolean() ? new ElasticsearchException("fatal error") : null);

View File

@@ -439,7 +439,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase {
         assertThat(shardChangesRequests.get(0)[1], equalTo(64L));
         shardChangesRequests.clear();
-        task.innerHandleReadResponse(0L, 63L, new ShardChangesAction.Response(0, 0, 0, 100, new Translog.Operation[0]));
+        task.innerHandleReadResponse(0L, 63L, new ShardChangesAction.Response(0, 0, 0, 100, new Translog.Operation[0], 1L));
         assertThat(shardChangesRequests.size(), equalTo(1));
         assertThat(shardChangesRequests.get(0)[0], equalTo(0L));
@@ -782,7 +782,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase {
                     leaderGlobalCheckpoints.poll(),
                     maxSeqNos.poll(),
                     randomNonNegativeLong(),
-                    operations
+                    operations,
+                    1L
                 );
                 handler.accept(response);
             }
@@ -813,7 +814,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase {
             leaderGlobalCheckPoint,
             leaderGlobalCheckPoint,
             randomNonNegativeLong(),
-            ops.toArray(new Translog.Operation[0])
+            ops.toArray(new Translog.Operation[0]),
+            1L
         );
     }

View File

@@ -429,7 +429,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
                 final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes();
                 if (from > seqNoStats.getGlobalCheckpoint()) {
                     handler.accept(ShardChangesAction.getResponse(1L, seqNoStats,
-                        maxSeqNoOfUpdatesOrDeletes, ShardChangesAction.EMPTY_OPERATIONS_ARRAY));
+                        maxSeqNoOfUpdatesOrDeletes, ShardChangesAction.EMPTY_OPERATIONS_ARRAY, 1L));
                     return;
                 }
                 Translog.Operation[] ops = ShardChangesAction.getOperations(indexShard, seqNoStats.getGlobalCheckpoint(), from,
@@ -440,7 +440,8 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
                     seqNoStats.getGlobalCheckpoint(),
                     seqNoStats.getMaxSeqNo(),
                     maxSeqNoOfUpdatesOrDeletes,
-                    ops
+                    ops,
+                    1L
                 );
                 handler.accept(response);
                 return;

View File

@@ -49,6 +49,7 @@ public class StatsResponsesTests extends AbstractStreamableTestCase<FollowStatsA
             randomNonNegativeLong(),
             randomNonNegativeLong(),
             randomNonNegativeLong(),
+            randomNonNegativeLong(),
             Collections.emptyNavigableMap(),
             randomLong(),
             randomBoolean() ? new ElasticsearchException("fatal error") : null);

View File

@@ -10,6 +10,7 @@ import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
 import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction.Request;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
@@ -27,28 +28,28 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase {
     public void testInnerDelete() {
         Map<String, List<String>> existingAlreadyFollowedIndexUUIDS = new HashMap<>();
         Map<String, Map<String, String>> existingHeaders = new HashMap<>();
-        Map<String, AutoFollowMetadata.AutoFollowPattern> existingAutoFollowPatterns = new HashMap<>();
+        Map<String, AutoFollowPattern> existingAutoFollowPatterns = new HashMap<>();
         {
             List<String> existingPatterns = new ArrayList<>();
             existingPatterns.add("transactions-*");
-            existingAutoFollowPatterns.put("eu_cluster",
-                new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null));
+            existingAutoFollowPatterns.put("name1",
+                new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null));
             List<String> existingUUIDS = new ArrayList<>();
             existingUUIDS.add("_val");
-            existingAlreadyFollowedIndexUUIDS.put("eu_cluster", existingUUIDS);
-            existingHeaders.put("eu_cluster", Collections.singletonMap("key", "val"));
+            existingAlreadyFollowedIndexUUIDS.put("name1", existingUUIDS);
+            existingHeaders.put("name1", Collections.singletonMap("key", "val"));
         }
         {
             List<String> existingPatterns = new ArrayList<>();
             existingPatterns.add("logs-*");
-            existingAutoFollowPatterns.put("asia_cluster",
-                new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null));
+            existingAutoFollowPatterns.put("name2",
+                new AutoFollowPattern("asia_cluster", existingPatterns, null, null, null, null, null, null, null, null));
             List<String> existingUUIDS = new ArrayList<>();
             existingUUIDS.add("_val");
-            existingAlreadyFollowedIndexUUIDS.put("asia_cluster", existingUUIDS);
-            existingHeaders.put("asia_cluster", Collections.singletonMap("key", "val"));
+            existingAlreadyFollowedIndexUUIDS.put("name2", existingUUIDS);
+            existingHeaders.put("name2", Collections.singletonMap("key", "val"));
         }
         ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
@@ -56,27 +57,28 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase {
             .build();
         Request request = new Request();
-        request.setLeaderCluster("eu_cluster");
+        request.setName("name1");
         AutoFollowMetadata result = TransportDeleteAutoFollowPatternAction.innerDelete(request, clusterState)
             .getMetaData()
             .custom(AutoFollowMetadata.TYPE);
         assertThat(result.getPatterns().size(), equalTo(1));
-        assertThat(result.getPatterns().get("asia_cluster"), notNullValue());
+        assertThat(result.getPatterns().get("name2"), notNullValue());
+        assertThat(result.getPatterns().get("name2").getLeaderCluster(), equalTo("asia_cluster"));
         assertThat(result.getFollowedLeaderIndexUUIDs().size(), equalTo(1));
-        assertThat(result.getFollowedLeaderIndexUUIDs().get("asia_cluster"), notNullValue());
+        assertThat(result.getFollowedLeaderIndexUUIDs().get("name2"), notNullValue());
         assertThat(result.getHeaders().size(), equalTo(1));
-        assertThat(result.getHeaders().get("asia_cluster"), notNullValue());
+        assertThat(result.getHeaders().get("name2"), notNullValue());
     }
     public void testInnerDeleteDoesNotExist() {
         Map<String, List<String>> existingAlreadyFollowedIndexUUIDS = new HashMap<>();
-        Map<String, AutoFollowMetadata.AutoFollowPattern> existingAutoFollowPatterns = new HashMap<>();
+        Map<String, AutoFollowPattern> existingAutoFollowPatterns = new HashMap<>();
         Map<String, Map<String, String>> existingHeaders = new HashMap<>();
         {
             List<String> existingPatterns = new ArrayList<>();
             existingPatterns.add("transactions-*");
-            existingAutoFollowPatterns.put("eu_cluster",
-                new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null));
+            existingAutoFollowPatterns.put("name1",
+                new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null));
             existingHeaders.put("key", Collections.singletonMap("key", "val"));
         }
         ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster"))
@@ -85,10 +87,10 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase {
             .build();
         Request request = new Request();
-        request.setLeaderCluster("asia_cluster");
+        request.setName("name2");
         Exception e = expectThrows(ResourceNotFoundException.class,
             () -> TransportDeleteAutoFollowPatternAction.innerDelete(request, clusterState));
-        assertThat(e.getMessage(), equalTo("no auto-follow patterns for cluster alias [asia_cluster] found"));
+        assertThat(e.getMessage(), equalTo("auto-follow pattern [name2] is missing"));
     }
     public void testInnerDeleteNoAutoFollowMetadata() {
@@ -97,10 +99,10 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase {
             .build();
         Request request = new Request();
-        request.setLeaderCluster("asia_cluster");
+        request.setName("name1");
         Exception e = expectThrows(ResourceNotFoundException.class,
             () -> TransportDeleteAutoFollowPatternAction.innerDelete(request, clusterState));
-        assertThat(e.getMessage(), equalTo("no auto-follow patterns for cluster alias [asia_cluster] found"));
+        assertThat(e.getMessage(), equalTo("auto-follow pattern [name1] is missing"));
     }
 }

View File

@@ -23,22 +23,22 @@ public class TransportGetAutoFollowPatternActionTests extends ESTestCase {
     public void testGetAutoFollowPattern() {
         Map<String, AutoFollowPattern> patterns = new HashMap<>();
-        patterns.put("test_alias1",
-            new AutoFollowPattern(Collections.singletonList("index-*"), null, null, null, null, null, null, null, null));
-        patterns.put("test_alias2",
-            new AutoFollowPattern(Collections.singletonList("index-*"), null, null, null, null, null, null, null, null));
+        patterns.put("name1",
+            new AutoFollowPattern("test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null));
+        patterns.put("name2",
+            new AutoFollowPattern("test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null));
         MetaData metaData = MetaData.builder()
             .putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap()))
             .build();
-        Map<String, AutoFollowPattern> result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias1");
+        Map<String, AutoFollowPattern> result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "name1");
         assertThat(result.size(), equalTo(1));
-        assertThat(result, hasEntry("test_alias1", patterns.get("test_alias1")));
+        assertThat(result, hasEntry("name1", patterns.get("name1")));
         result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, null);
         assertThat(result.size(), equalTo(2));
-        assertThat(result, hasEntry("test_alias1", patterns.get("test_alias1")));
-        assertThat(result, hasEntry("test_alias2", patterns.get("test_alias2")));
+        assertThat(result, hasEntry("name1", patterns.get("name1")));
+        assertThat(result, hasEntry("name2", patterns.get("name2")));
         expectThrows(ResourceNotFoundException.class,
             () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "another_alias"));
@@ -51,13 +51,13 @@ public class TransportGetAutoFollowPatternActionTests extends ESTestCase {
             .putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata)
             .build();
         expectThrows(ResourceNotFoundException.class,
-            () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias"));
+            () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "name1"));
     }
     public void testGetAutoFollowPatternNoAutoFollowMetadata() {
         MetaData metaData = MetaData.builder().build();
         expectThrows(ResourceNotFoundException.class,
-            () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias"));
+            () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "name1"));
     }
 }

View File

@@ -12,6 +12,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
+import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
 import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction;
 import java.util.ArrayList;
@@ -28,6 +29,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase {
     public void testInnerPut() {
         PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
+        request.setName("name1");
         request.setLeaderCluster("eu_cluster");
         request.setLeaderIndexPatterns(Collections.singletonList("logs-*"));
@@ -43,14 +45,16 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase {
         AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE);
         assertThat(autoFollowMetadata, notNullValue());
         assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1));
-        assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().size(), equalTo(1));
-        assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(0), equalTo("logs-*"));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderCluster(), equalTo("eu_cluster"));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().size(), equalTo(1));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(0), equalTo("logs-*"));
         assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1));
-        assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(0));
+        assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("name1").size(), equalTo(0));
     }
     public void testInnerPut_existingLeaderIndices() {
         PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
+        request.setName("name1");
         request.setLeaderCluster("eu_cluster");
         request.setLeaderIndexPatterns(Collections.singletonList("logs-*"));
@@ -82,28 +86,30 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase {
         AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE);
         assertThat(autoFollowMetadata, notNullValue());
         assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1));
-        assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().size(), equalTo(1));
-        assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(0), equalTo("logs-*"));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderCluster(), equalTo("eu_cluster"));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().size(), equalTo(1));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(0), equalTo("logs-*"));
         assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1));
-        assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(numMatchingLeaderIndices));
+        assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("name1").size(), equalTo(numMatchingLeaderIndices));
     }
     public void testInnerPut_existingLeaderIndicesAndAutoFollowMetadata() {
         PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
+        request.setName("name1");
         request.setLeaderCluster("eu_cluster");
         request.setLeaderIndexPatterns(Arrays.asList("logs-*", "transactions-*"));
-        Map<String, AutoFollowMetadata.AutoFollowPattern> existingAutoFollowPatterns = new HashMap<>();
+        Map<String, AutoFollowPattern> existingAutoFollowPatterns = new HashMap<>();
         List<String> existingPatterns = new ArrayList<>();
         existingPatterns.add("transactions-*");
-        existingAutoFollowPatterns.put("eu_cluster",
-            new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null));
+        existingAutoFollowPatterns.put("name1",
+            new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null));
         Map<String, List<String>> existingAlreadyFollowedIndexUUIDS = new HashMap<>();
         List<String> existingUUIDS = new ArrayList<>();
         existingUUIDS.add("_val");
-        existingAlreadyFollowedIndexUUIDS.put("eu_cluster", existingUUIDS);
+        existingAlreadyFollowedIndexUUIDS.put("name1", existingUUIDS);
         Map<String, Map<String, String>> existingHeaders = new HashMap<>();
-        existingHeaders.put("eu_cluster", Collections.singletonMap("key", "val"));
+        existingHeaders.put("name1", Collections.singletonMap("key", "val"));
         ClusterState localState = ClusterState.builder(new ClusterName("us_cluster"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
@@ -127,13 +133,14 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase {
         AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE);
         assertThat(autoFollowMetadata, notNullValue());
         assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1));
-        assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().size(), equalTo(2));
-        assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(0), equalTo("logs-*"));
-        assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(1), equalTo("transactions-*"));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderCluster(), equalTo("eu_cluster"));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().size(), equalTo(2));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(0), equalTo("logs-*"));
+        assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(1), equalTo("transactions-*"));
        assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1));
-        assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(numLeaderIndices + 1));
+        assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("name1").size(), equalTo(numLeaderIndices + 1));
         assertThat(autoFollowMetadata.getHeaders().size(), equalTo(1));
-        assertThat(autoFollowMetadata.getHeaders().get("eu_cluster"), notNullValue());
+        assertThat(autoFollowMetadata.getHeaders().get("name1"), notNullValue());
     }
 }
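The innerPut tests assert that putting a pattern under an existing name keeps the name as the map key and merges the leader index patterns, with the request's patterns first and pre-existing ones after. A standalone sketch of one way that union could behave; the real implementation may differ:

----------------------------------------------------------------
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class InnerPutMergeSketch {
    public static void main(String[] args) {
        // Pre-existing pattern list stored under "name1" in AutoFollowMetadata.
        Set<String> existing = new LinkedHashSet<>(Arrays.asList("transactions-*"));
        // Patterns from the incoming put request for the same name.
        List<String> requested = Arrays.asList("logs-*", "transactions-*");

        // Union preserving request order first, then existing extras,
        // consistent with the get(0)/get(1) assertions above.
        Set<String> merged = new LinkedHashSet<>();
        merged.addAll(requested);
        merged.addAll(existing);
        System.out.println(merged); // [logs-*, transactions-*]
    }
}
----------------------------------------------------------------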

View File

@@ -85,11 +85,20 @@ public class TransportResumeFollowActionTests extends ESTestCase {
             Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null));
             assertThat(e.getMessage(), equalTo("leader index [leader_cluster:index1] does not have soft deletes enabled"));
         }
+        {
+            // should fail because the follower index does not have soft deletes enabled
+            IndexMetaData leaderIMD = createIMD("index1", 5, Settings.builder()
+                .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
+            IndexMetaData followIMD = createIMD("index2", 5, Settings.EMPTY, customMetaData);
+            Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null));
+            assertThat(e.getMessage(), equalTo("follower index [index2] does not have soft deletes enabled"));
+        }
         {
             // should fail because the number of primary shards between leader and follow index are not equal
             IndexMetaData leaderIMD = createIMD("index1", 5, Settings.builder()
                 .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
-            IndexMetaData followIMD = createIMD("index2", 4, Settings.EMPTY, customMetaData);
+            IndexMetaData followIMD = createIMD("index2", 4,
+                Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), customMetaData);
             Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null));
             assertThat(e.getMessage(),
                 equalTo("leader index primary shards [5] does not match with the number of shards of the follow index [4]"));
@@ -98,8 +107,8 @@ public class TransportResumeFollowActionTests extends ESTestCase {
             // should fail, because leader index is closed
             IndexMetaData leaderIMD = createIMD("index1", State.CLOSE, "{}", 5, Settings.builder()
                 .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
-            IndexMetaData followIMD = createIMD("index2", State.OPEN, "{}", 5, Settings.builder()
-                .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), customMetaData);
+            IndexMetaData followIMD = createIMD("index2", State.OPEN, "{}", 5,
+                Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), customMetaData);
             Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null));
             assertThat(e.getMessage(), equalTo("leader and follow index must be open"));
         }
@@ -107,7 +116,8 @@ public class TransportResumeFollowActionTests extends ESTestCase {
             // should fail, because index.xpack.ccr.following_index setting has not been enabled in leader index
             IndexMetaData leaderIMD = createIMD("index1", 1,
                 Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
-            IndexMetaData followIMD = createIMD("index2", 1, Settings.EMPTY, customMetaData);
+            IndexMetaData followIMD = createIMD("index2", 1,
+                Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), customMetaData);
             MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "index2");
             mapperService.updateMapping(null, followIMD);
             Exception e = expectThrows(IllegalArgumentException.class,
@@ -120,7 +130,8 @@ public class TransportResumeFollowActionTests extends ESTestCase {
             IndexMetaData leaderIMD = createIMD("index1", State.OPEN, "{\"properties\": {\"field\": {\"type\": \"keyword\"}}}", 5,
                 Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
             IndexMetaData followIMD = createIMD("index2", State.OPEN, "{\"properties\": {\"field\": {\"type\": \"text\"}}}", 5,
-                Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true).build(), customMetaData);
+                Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+                    .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build(), customMetaData);
             MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "index2");
             mapperService.updateMapping(null, followIMD);
             Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, mapperService));
@@ -135,6 +146,7 @@ public class TransportResumeFollowActionTests extends ESTestCase {
                 .put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace").build(), null);
             IndexMetaData followIMD = createIMD("index2", State.OPEN, mapping, 5, Settings.builder()
                 .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+                .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
                 .put("index.analysis.analyzer.my_analyzer.type", "custom")
                 .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), customMetaData);
             Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null));
@@ -144,8 +156,8 @@ public class TransportResumeFollowActionTests extends ESTestCase {
             // should fail because the following index does not have the following_index settings
             IndexMetaData leaderIMD = createIMD("index1", 5,
                 Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
-            Settings followingIndexSettings = randomBoolean() ? Settings.EMPTY :
-                Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), false).build();
+            Settings followingIndexSettings = Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
+                .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), false).build();
             IndexMetaData followIMD = createIMD("index2", 5, followingIndexSettings, customMetaData);
             MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(),
followingIndexSettings, "index2"); followingIndexSettings, "index2");
@ -160,6 +172,7 @@ public class TransportResumeFollowActionTests extends ESTestCase {
IndexMetaData leaderIMD = createIMD("index1", 5, Settings.builder() IndexMetaData leaderIMD = createIMD("index1", 5, Settings.builder()
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null); .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
IndexMetaData followIMD = createIMD("index2", 5, Settings.builder() IndexMetaData followIMD = createIMD("index2", 5, Settings.builder()
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true).build(), customMetaData); .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true).build(), customMetaData);
MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "index2"); MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "index2");
mapperService.updateMapping(null, followIMD); mapperService.updateMapping(null, followIMD);
@ -174,6 +187,7 @@ public class TransportResumeFollowActionTests extends ESTestCase {
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), null); .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), null);
IndexMetaData followIMD = createIMD("index2", State.OPEN, mapping, 5, Settings.builder() IndexMetaData followIMD = createIMD("index2", State.OPEN, mapping, 5, Settings.builder()
.put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true) .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put("index.analysis.analyzer.my_analyzer.type", "custom") .put("index.analysis.analyzer.my_analyzer.type", "custom")
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), customMetaData); .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), customMetaData);
MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(),
@ -191,6 +205,7 @@ public class TransportResumeFollowActionTests extends ESTestCase {
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), null); .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), null);
IndexMetaData followIMD = createIMD("index2", State.OPEN, mapping, 5, Settings.builder() IndexMetaData followIMD = createIMD("index2", State.OPEN, mapping, 5, Settings.builder()
.put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true) .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s") .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s")
.put("index.analysis.analyzer.my_analyzer.type", "custom") .put("index.analysis.analyzer.my_analyzer.type", "custom")
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), customMetaData); .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), customMetaData);
View File
@@ -41,7 +41,8 @@ public class BulkShardOperationsTests extends IndexShardTestCase {
     // test that we use the primary term on the follower when applying operations from the leader
     public void testPrimaryTermFromFollower() throws IOException {
-        final Settings settings = Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true).build();
+        final Settings settings = Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
         final IndexShard followerPrimary = newStartedShard(true, settings, new FollowingEngineFactory());
         // we use this primary on the operations yet we expect the applied operations to have the primary term of the follower
View File
@@ -14,6 +14,7 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.IndexShardTestCase;
@@ -31,6 +32,7 @@ public class FollowEngineIndexShardTests extends IndexShardTestCase {
     public void testDoNotFillGaps() throws Exception {
         Settings settings = Settings.builder()
             .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
             .build();
         final IndexShard indexShard = newStartedShard(false, settings, new FollowingEngineFactory());
View File
@@ -127,6 +127,7 @@ public class FollowingEngineTests extends ESTestCase {
                 .put("index.number_of_replicas", 0)
                 .put("index.version.created", Version.CURRENT)
                 .put("index.xpack.ccr.following_index", true)
+                .put("index.soft_deletes.enabled", true)
                 .build();
         final IndexMetaData indexMetaData = IndexMetaData.builder(index.getName()).settings(settings).build();
         final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings);
@@ -152,6 +153,7 @@ public class FollowingEngineTests extends ESTestCase {
                 .put("index.number_of_replicas", 0)
                 .put("index.version.created", Version.CURRENT)
                 .put("index.xpack.ccr.following_index", true)
+                .put("index.soft_deletes.enabled", true)
                 .build();
         final IndexMetaData indexMetaData = IndexMetaData.builder(index.getName()).settings(settings).build();
         final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings);
@@ -186,6 +188,7 @@ public class FollowingEngineTests extends ESTestCase {
                 .put("index.number_of_replicas", 0)
                 .put("index.version.created", Version.CURRENT)
                 .put("index.xpack.ccr.following_index", true)
+                .put("index.soft_deletes.enabled", true)
                 .build();
         final IndexMetaData indexMetaData = IndexMetaData.builder(index.getName()).settings(settings).build();
         final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings);
@@ -216,6 +219,7 @@ public class FollowingEngineTests extends ESTestCase {
                 .put("index.number_of_replicas", 0)
                 .put("index.version.created", Version.CURRENT)
                 .put("index.xpack.ccr.following_index", true)
+                .put("index.soft_deletes.enabled", true)
                 .build();
         final IndexMetaData indexMetaData = IndexMetaData.builder(index.getName()).settings(settings).build();
         final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings);
View File
@@ -94,6 +94,7 @@ public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase<Fol
         final int numberOfQueuedWrites = randomIntBetween(0, Integer.MAX_VALUE);
         final long mappingVersion = randomIntBetween(0, Integer.MAX_VALUE);
         final long totalFetchTimeMillis = randomLongBetween(0, 4096);
+        final long totalFetchTookTimeMillis = randomLongBetween(0, 4096);
         final long numberOfSuccessfulFetches = randomNonNegativeLong();
         final long numberOfFailedFetches = randomLongBetween(0, 8);
         final long operationsReceived = randomNonNegativeLong();
@@ -122,6 +123,7 @@ public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase<Fol
                 numberOfQueuedWrites,
                 mappingVersion,
                 totalFetchTimeMillis,
+                totalFetchTookTimeMillis,
                 numberOfSuccessfulFetches,
                 numberOfFailedFetches,
                 operationsReceived,
@@ -166,6 +168,7 @@ public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase<Fol
                 + "\"number_of_queued_writes\":" + numberOfQueuedWrites + ","
                 + "\"mapping_version\":" + mappingVersion + ","
                 + "\"total_fetch_time_millis\":" + totalFetchTimeMillis + ","
+                + "\"total_fetch_leader_time_millis\":" + totalFetchTookTimeMillis + ","
                 + "\"number_of_successful_fetches\":" + numberOfSuccessfulFetches + ","
                 + "\"number_of_failed_fetches\":" + numberOfFailedFetches + ","
                 + "\"operations_received\":" + operationsReceived + ","
@@ -208,6 +211,7 @@ public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase<Fol
                 1,
                 1,
                 100,
+                50,
                 10,
                 0,
                 10,
@@ -226,7 +230,6 @@ public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase<Fol
         Map<String, Object> template =
             XContentHelper.convertToMap(XContentType.JSON.xContent(), MonitoringTemplateUtils.loadTemplate("es"), false);
         Map<?, ?> followStatsMapping = (Map<?, ?>) XContentMapValues.extractValue("mappings.doc.properties.ccr_stats.properties", template);
-        assertThat(serializedStatus.size(), equalTo(followStatsMapping.size()));
         for (Map.Entry<String, Object> entry : serializedStatus.entrySet()) {
             String fieldName = entry.getKey();
View File
@@ -175,6 +175,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
 
     public static class AutoFollowPattern implements Writeable, ToXContentObject {
 
+        public static final ParseField LEADER_CLUSTER_FIELD = new ParseField("leader_cluster");
         public static final ParseField LEADER_PATTERNS_FIELD = new ParseField("leader_index_patterns");
         public static final ParseField FOLLOW_PATTERN_FIELD = new ParseField("follow_index_pattern");
         public static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count");
@@ -188,10 +189,12 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         @SuppressWarnings("unchecked")
         private static final ConstructingObjectParser<AutoFollowPattern, Void> PARSER =
             new ConstructingObjectParser<>("auto_follow_pattern",
-                args -> new AutoFollowPattern((List<String>) args[0], (String) args[1], (Integer) args[2], (Integer) args[3],
-                    (ByteSizeValue) args[4], (Integer) args[5], (Integer) args[6], (TimeValue) args[7], (TimeValue) args[8]));
+                args -> new AutoFollowPattern((String) args[0], (List<String>) args[1], (String) args[2], (Integer) args[3],
+                    (Integer) args[4], (ByteSizeValue) args[5], (Integer) args[6], (Integer) args[7], (TimeValue) args[8],
+                    (TimeValue) args[9]));
 
         static {
+            PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_CLUSTER_FIELD);
             PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), LEADER_PATTERNS_FIELD);
             PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FOLLOW_PATTERN_FIELD);
             PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_BATCH_OPERATION_COUNT);
@@ -211,6 +214,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
                 POLL_TIMEOUT, ObjectParser.ValueType.STRING);
         }
 
+        private final String leaderCluster;
         private final List<String> leaderIndexPatterns;
         private final String followIndexPattern;
         private final Integer maxBatchOperationCount;
@@ -221,7 +225,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         private final TimeValue maxRetryDelay;
         private final TimeValue pollTimeout;
 
-        public AutoFollowPattern(List<String> leaderIndexPatterns,
+        public AutoFollowPattern(String leaderCluster,
+                                 List<String> leaderIndexPatterns,
                                  String followIndexPattern,
                                  Integer maxBatchOperationCount,
                                  Integer maxConcurrentReadBatches,
@@ -230,6 +235,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
                                  Integer maxWriteBufferSize,
                                  TimeValue maxRetryDelay,
                                  TimeValue pollTimeout) {
+            this.leaderCluster = leaderCluster;
             this.leaderIndexPatterns = leaderIndexPatterns;
             this.followIndexPattern = followIndexPattern;
             this.maxBatchOperationCount = maxBatchOperationCount;
@@ -242,6 +248,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         }
 
         public AutoFollowPattern(StreamInput in) throws IOException {
+            leaderCluster = in.readString();
             leaderIndexPatterns = in.readList(StreamInput::readString);
             followIndexPattern = in.readOptionalString();
             maxBatchOperationCount = in.readOptionalVInt();
@@ -261,6 +268,10 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
             return Regex.simpleMatch(leaderIndexPatterns, indexName);
         }
 
+        public String getLeaderCluster() {
+            return leaderCluster;
+        }
+
         public List<String> getLeaderIndexPatterns() {
             return leaderIndexPatterns;
         }
@@ -299,6 +310,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         @Override
         public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(leaderCluster);
             out.writeStringList(leaderIndexPatterns);
             out.writeOptionalString(followIndexPattern);
             out.writeOptionalVInt(maxBatchOperationCount);
@@ -312,6 +324,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster);
             builder.array(LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns.toArray(new String[0]));
             if (followIndexPattern != null) {
                 builder.field(FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexPattern);
@@ -350,7 +363,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             AutoFollowPattern that = (AutoFollowPattern) o;
-            return Objects.equals(leaderIndexPatterns, that.leaderIndexPatterns) &&
+            return Objects.equals(leaderCluster, that.leaderCluster) &&
+                Objects.equals(leaderIndexPatterns, that.leaderIndexPatterns) &&
                 Objects.equals(followIndexPattern, that.followIndexPattern) &&
                 Objects.equals(maxBatchOperationCount, that.maxBatchOperationCount) &&
                 Objects.equals(maxConcurrentReadBatches, that.maxConcurrentReadBatches) &&
@@ -364,6 +378,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         @Override
         public int hashCode() {
             return Objects.hash(
+                leaderCluster,
                 leaderIndexPatterns,
                 followIndexPattern,
                 maxBatchOperationCount,
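Put together, the pattern now carries the leader cluster alias as its first constructor argument, parsed from the new leader_cluster field. A hedged sketch of constructing one directly (the alias and patterns here are invented for illustration; the ten-argument order mirrors the parser lambda above, with unset tuning parameters passed as null):

--------------------------------------------------
AutoFollowPattern pattern = new AutoFollowPattern(
    "leader_cluster_alias",                  // leaderCluster: new first argument
    Collections.singletonList("logs-*"),     // leaderIndexPatterns
    "copy-{{leader_index}}",                 // followIndexPattern (optional)
    null, null, null, null, null,            // max batch/buffer tuning left unset
    null, null);                             // maxRetryDelay, pollTimeout
--------------------------------------------------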
View File
@@ -48,6 +48,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
     private static final ParseField NUMBER_OF_QUEUED_WRITES_FIELD = new ParseField("number_of_queued_writes");
     private static final ParseField MAPPING_VERSION_FIELD = new ParseField("mapping_version");
     private static final ParseField TOTAL_FETCH_TIME_MILLIS_FIELD = new ParseField("total_fetch_time_millis");
+    private static final ParseField TOTAL_FETCH_LEADER_TIME_MILLIS_FIELD = new ParseField("total_fetch_leader_time_millis");
     private static final ParseField NUMBER_OF_SUCCESSFUL_FETCHES_FIELD = new ParseField("number_of_successful_fetches");
     private static final ParseField NUMBER_OF_FAILED_FETCHES_FIELD = new ParseField("number_of_failed_fetches");
     private static final ParseField OPERATIONS_RECEIVED_FIELD = new ParseField("operations_received");
@@ -87,12 +88,13 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
                     (long) args[19],
                     (long) args[20],
                     (long) args[21],
+                    (long) args[22],
                     new TreeMap<>(
-                        ((List<Map.Entry<Long, Tuple<Integer, ElasticsearchException>>>) args[22])
+                        ((List<Map.Entry<Long, Tuple<Integer, ElasticsearchException>>>) args[23])
                             .stream()
                             .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))),
-                    (long) args[23],
-                    (ElasticsearchException) args[24]));
+                    (long) args[24],
+                    (ElasticsearchException) args[25]));
 
     public static final String FETCH_EXCEPTIONS_ENTRY_PARSER_NAME = "shard-follow-node-task-status-fetch-exceptions-entry";
@@ -116,6 +118,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
         STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUMBER_OF_QUEUED_WRITES_FIELD);
         STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), MAPPING_VERSION_FIELD);
         STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_TIME_MILLIS_FIELD);
+        STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_LEADER_TIME_MILLIS_FIELD);
         STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_FETCHES_FIELD);
         STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_FETCHES_FIELD);
         STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), OPERATIONS_RECEIVED_FIELD);
@@ -228,6 +231,12 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
         return totalFetchTimeMillis;
     }
 
+    private final long totalFetchLeaderTimeMillis;
+
+    public long totalFetchLeaderTimeMillis() {
+        return totalFetchLeaderTimeMillis;
+    }
+
     private final long numberOfSuccessfulFetches;
 
     public long numberOfSuccessfulFetches() {
@@ -309,6 +318,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
             final int numberOfQueuedWrites,
             final long mappingVersion,
             final long totalFetchTimeMillis,
+            final long totalFetchLeaderTimeMillis,
             final long numberOfSuccessfulFetches,
             final long numberOfFailedFetches,
             final long operationsReceived,
@@ -334,6 +344,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
         this.numberOfQueuedWrites = numberOfQueuedWrites;
         this.mappingVersion = mappingVersion;
         this.totalFetchTimeMillis = totalFetchTimeMillis;
+        this.totalFetchLeaderTimeMillis = totalFetchLeaderTimeMillis;
         this.numberOfSuccessfulFetches = numberOfSuccessfulFetches;
         this.numberOfFailedFetches = numberOfFailedFetches;
         this.operationsReceived = operationsReceived;
@@ -362,6 +373,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
         this.numberOfQueuedWrites = in.readVInt();
         this.mappingVersion = in.readVLong();
         this.totalFetchTimeMillis = in.readVLong();
+        this.totalFetchLeaderTimeMillis = in.readVLong();
         this.numberOfSuccessfulFetches = in.readVLong();
         this.numberOfFailedFetches = in.readVLong();
         this.operationsReceived = in.readVLong();
@@ -397,6 +409,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
         out.writeVInt(numberOfQueuedWrites);
         out.writeVLong(mappingVersion);
         out.writeVLong(totalFetchTimeMillis);
+        out.writeVLong(totalFetchLeaderTimeMillis);
         out.writeVLong(numberOfSuccessfulFetches);
         out.writeVLong(numberOfFailedFetches);
         out.writeVLong(operationsReceived);
@@ -444,6 +457,10 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
                 TOTAL_FETCH_TIME_MILLIS_FIELD.getPreferredName(),
                 "total_fetch_time",
                 new TimeValue(totalFetchTimeMillis, TimeUnit.MILLISECONDS));
+            builder.humanReadableField(
+                TOTAL_FETCH_LEADER_TIME_MILLIS_FIELD.getPreferredName(),
+                "total_fetch_leader_time",
+                new TimeValue(totalFetchLeaderTimeMillis, TimeUnit.MILLISECONDS));
             builder.field(NUMBER_OF_SUCCESSFUL_FETCHES_FIELD.getPreferredName(), numberOfSuccessfulFetches);
             builder.field(NUMBER_OF_FAILED_FETCHES_FIELD.getPreferredName(), numberOfFailedFetches);
             builder.field(OPERATIONS_RECEIVED_FIELD.getPreferredName(), operationsReceived);
@@ -516,6 +533,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
                 numberOfQueuedWrites == that.numberOfQueuedWrites &&
                 mappingVersion == that.mappingVersion &&
                 totalFetchTimeMillis == that.totalFetchTimeMillis &&
+                totalFetchLeaderTimeMillis == that.totalFetchLeaderTimeMillis &&
                 numberOfSuccessfulFetches == that.numberOfSuccessfulFetches &&
                 numberOfFailedFetches == that.numberOfFailedFetches &&
                 operationsReceived == that.operationsReceived &&
@@ -552,6 +570,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
                 numberOfQueuedWrites,
                 mappingVersion,
                 totalFetchTimeMillis,
+                totalFetchLeaderTimeMillis,
                 numberOfSuccessfulFetches,
                 numberOfFailedFetches,
                 operationsReceived,
View File
@@ -33,35 +33,35 @@ public class DeleteAutoFollowPatternAction extends Action<AcknowledgedResponse>
 
     public static class Request extends AcknowledgedRequest<Request> {
 
-        private String leaderCluster;
+        private String name;
 
         @Override
         public ActionRequestValidationException validate() {
             ActionRequestValidationException validationException = null;
-            if (leaderCluster == null) {
-                validationException = addValidationError("leaderCluster is missing", validationException);
+            if (name == null) {
+                validationException = addValidationError("name is missing", validationException);
             }
             return validationException;
         }
 
-        public String getLeaderCluster() {
-            return leaderCluster;
+        public String getName() {
+            return name;
         }
 
-        public void setLeaderCluster(String leaderCluster) {
-            this.leaderCluster = leaderCluster;
+        public void setName(String name) {
+            this.name = name;
         }
 
         @Override
         public void readFrom(StreamInput in) throws IOException {
             super.readFrom(in);
-            leaderCluster = in.readString();
+            name = in.readString();
         }
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            out.writeString(leaderCluster);
+            out.writeString(name);
         }
 
         @Override
@@ -69,12 +69,12 @@ public class DeleteAutoFollowPatternAction extends Action<AcknowledgedResponse>
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             Request request = (Request) o;
-            return Objects.equals(leaderCluster, request.leaderCluster);
+            return Objects.equals(name, request.name);
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(leaderCluster);
+            return Objects.hash(name);
         }
     }
View File
@@ -36,14 +36,14 @@ public class GetAutoFollowPatternAction extends Action<GetAutoFollowPatternActio
 
     public static class Request extends MasterNodeReadRequest<Request> {
 
-        private String leaderCluster;
+        private String name;
 
         public Request() {
         }
 
         public Request(StreamInput in) throws IOException {
             super(in);
-            this.leaderCluster = in.readOptionalString();
+            this.name = in.readOptionalString();
         }
 
         @Override
@@ -51,18 +51,18 @@ public class GetAutoFollowPatternAction extends Action<GetAutoFollowPatternActio
             return null;
         }
 
-        public String getLeaderCluster() {
-            return leaderCluster;
+        public String getName() {
+            return name;
         }
 
-        public void setLeaderCluster(String leaderCluster) {
-            this.leaderCluster = leaderCluster;
+        public void setName(String name) {
+            this.name = name;
         }
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            out.writeOptionalString(leaderCluster);
+            out.writeOptionalString(name);
         }
 
         @Override
@@ -70,12 +70,12 @@ public class GetAutoFollowPatternAction extends Action<GetAutoFollowPatternActio
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             Request request = (Request) o;
-            return Objects.equals(leaderCluster, request.leaderCluster);
+            return Objects.equals(name, request.name);
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(leaderCluster);
+            return Objects.hash(name);
         }
     }
View File
@@ -46,8 +46,11 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
 
         private static final ObjectParser<Request, String> PARSER = new ObjectParser<>("put_auto_follow_pattern_request", Request::new);
+        private static final ParseField NAME_FIELD = new ParseField("name");
 
         static {
-            PARSER.declareString(Request::setLeaderCluster, LEADER_CLUSTER_FIELD);
+            PARSER.declareString(Request::setName, NAME_FIELD);
+            PARSER.declareString(Request::setLeaderCluster, AutoFollowPattern.LEADER_CLUSTER_FIELD);
             PARSER.declareStringArray(Request::setLeaderIndexPatterns, AutoFollowPattern.LEADER_PATTERNS_FIELD);
             PARSER.declareString(Request::setFollowIndexNamePattern, AutoFollowPattern.FOLLOW_PATTERN_FIELD);
             PARSER.declareInt(Request::setMaxBatchOperationCount, AutoFollowPattern.MAX_BATCH_OPERATION_COUNT);
@@ -67,20 +70,21 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
                 AutoFollowPattern.POLL_TIMEOUT, ObjectParser.ValueType.STRING);
         }
 
-        public static Request fromXContent(XContentParser parser, String remoteClusterAlias) throws IOException {
+        public static Request fromXContent(XContentParser parser, String name) throws IOException {
             Request request = PARSER.parse(parser, null);
-            if (remoteClusterAlias != null) {
-                if (request.leaderCluster == null) {
-                    request.leaderCluster = remoteClusterAlias;
+            if (name != null) {
+                if (request.name == null) {
+                    request.name = name;
                 } else {
-                    if (request.leaderCluster.equals(remoteClusterAlias) == false) {
-                        throw new IllegalArgumentException("provided leaderCluster is not equal");
+                    if (request.name.equals(name) == false) {
+                        throw new IllegalArgumentException("provided name is not equal");
                     }
                 }
             }
             return request;
         }
 
+        private String name;
         private String leaderCluster;
         private List<String> leaderIndexPatterns;
         private String followIndexNamePattern;
@@ -96,8 +100,11 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
         @Override
         public ActionRequestValidationException validate() {
             ActionRequestValidationException validationException = null;
+            if (name == null) {
+                validationException = addValidationError("[" + NAME_FIELD.getPreferredName() + "] is missing", validationException);
+            }
             if (leaderCluster == null) {
-                validationException = addValidationError("[" + LEADER_CLUSTER_FIELD.getPreferredName() +
+                validationException = addValidationError("[" + AutoFollowPattern.LEADER_CLUSTER_FIELD.getPreferredName() +
                     "] is missing", validationException);
             }
             if (leaderIndexPatterns == null || leaderIndexPatterns.isEmpty()) {
@@ -120,6 +127,14 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
             return validationException;
         }
 
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+
         public String getLeaderCluster() {
             return leaderCluster;
         }
@@ -203,6 +218,7 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
         @Override
         public void readFrom(StreamInput in) throws IOException {
             super.readFrom(in);
+            name = in.readString();
             leaderCluster = in.readString();
             leaderIndexPatterns = in.readList(StreamInput::readString);
             followIndexNamePattern = in.readOptionalString();
@@ -218,6 +234,7 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
+            out.writeString(name);
             out.writeString(leaderCluster);
             out.writeStringList(leaderIndexPatterns);
             out.writeOptionalString(followIndexNamePattern);
@@ -234,7 +251,8 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
             {
-                builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster);
+                builder.field(NAME_FIELD.getPreferredName(), name);
+                builder.field(AutoFollowPattern.LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster);
                 builder.field(AutoFollowPattern.LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns);
                 if (followIndexNamePattern != null) {
                     builder.field(AutoFollowPattern.FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexNamePattern);
@@ -270,7 +288,8 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             Request request = (Request) o;
-            return Objects.equals(leaderCluster, request.leaderCluster) &&
+            return Objects.equals(name, request.name) &&
+                Objects.equals(leaderCluster, request.leaderCluster) &&
                 Objects.equals(leaderIndexPatterns, request.leaderIndexPatterns) &&
                 Objects.equals(followIndexNamePattern, request.followIndexNamePattern) &&
                 Objects.equals(maxBatchOperationCount, request.maxBatchOperationCount) &&
@@ -285,7 +304,8 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
         @Override
         public int hashCode() {
             return Objects.hash(
-                leaderCluster,
+                name,
+                leaderCluster,
                 leaderIndexPatterns,
                 followIndexNamePattern,
                 maxBatchOperationCount,
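A short usage sketch of the reworked request (values invented for illustration): the pattern is now addressed by its own name, while leader_cluster becomes an ordinary body field, and validation requires both:

--------------------------------------------------
PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
request.setName("my-pattern");                      // new: identifies the pattern itself
request.setLeaderCluster("leader_cluster_alias");   // no longer doubles as the pattern id
request.setLeaderIndexPatterns(Collections.singletonList("logs-*"));
assert request.validate() == null;                  // null means no validation errors
--------------------------------------------------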
View File
@@ -7,22 +7,29 @@ package org.elasticsearch.xpack.core.rollup.action;
 
 import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.support.master.AcknowledgedRequest;
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
-import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.action.support.tasks.BaseTasksRequest;
+import org.elasticsearch.action.support.tasks.BaseTasksResponse;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.core.rollup.RollupField;
 
 import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
 import java.util.Objects;
 
-public class DeleteRollupJobAction extends Action<AcknowledgedResponse> {
+public class DeleteRollupJobAction extends Action<DeleteRollupJobAction.Response> {
 
     public static final DeleteRollupJobAction INSTANCE = new DeleteRollupJobAction();
     public static final String NAME = "cluster:admin/xpack/rollup/delete";
@@ -32,11 +39,11 @@ public class DeleteRollupJobAction extends Action<AcknowledgedResponse> {
     }
 
     @Override
-    public AcknowledgedResponse newResponse() {
-        return new AcknowledgedResponse();
+    public Response newResponse() {
+        return new Response();
     }
 
-    public static class Request extends AcknowledgedRequest<Request> implements ToXContent {
+    public static class Request extends BaseTasksRequest<Request> implements ToXContentFragment {
         private String id;
 
         public Request(String id) {
@@ -45,6 +52,11 @@ public class DeleteRollupJobAction extends Action<AcknowledgedResponse> {
 
         public Request() {}
 
+        @Override
+        public boolean match(Task task) {
+            return task.getDescription().equals(RollupField.NAME + "_" + id);
+        }
+
         public String getId() {
             return id;
         }
@@ -90,10 +102,74 @@ public class DeleteRollupJobAction extends Action<AcknowledgedResponse> {
         }
     }
 
-    public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, AcknowledgedResponse, RequestBuilder> {
+    public static class RequestBuilder extends ActionRequestBuilder<DeleteRollupJobAction.Request, DeleteRollupJobAction.Response> {
 
         protected RequestBuilder(ElasticsearchClient client, DeleteRollupJobAction action) {
-            super(client, action, new Request());
+            super(client, action, new DeleteRollupJobAction.Request());
         }
     }
 
+    public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject {
+
+        private boolean acknowledged;
+
+        public Response(StreamInput in) throws IOException {
+            super(Collections.emptyList(), Collections.emptyList());
+            readFrom(in);
+        }
+
+        public Response(boolean acknowledged, List<TaskOperationFailure> taskFailures, List<FailedNodeException> nodeFailures) {
+            super(taskFailures, nodeFailures);
+            this.acknowledged = acknowledged;
+        }
+
+        public Response(boolean acknowledged) {
+            super(Collections.emptyList(), Collections.emptyList());
+            this.acknowledged = acknowledged;
+        }
+
+        public Response() {
+            super(Collections.emptyList(), Collections.emptyList());
+            this.acknowledged = false;
+        }
+
+        public boolean isDeleted() {
+            return acknowledged;
+        }
+
+        @Override
+        public void readFrom(StreamInput in) throws IOException {
+            super.readFrom(in);
+            acknowledged = in.readBoolean();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeBoolean(acknowledged);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            {
+                toXContentCommon(builder, params);
+                builder.field("acknowledged", acknowledged);
+            }
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            DeleteRollupJobAction.Response response = (DeleteRollupJobAction.Response) o;
+            return super.equals(o) && acknowledged == response.acknowledged;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(super.hashCode(), acknowledged);
+        }
+    }
 }
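As a quick illustration of the new response type (not from the commit): the acknowledged flag drives isDeleted() and is rendered next to any task or node failures collected by the tasks framework:

--------------------------------------------------
// Single-argument constructor: acknowledged, with empty failure lists.
DeleteRollupJobAction.Response response = new DeleteRollupJobAction.Response(true);
assert response.isDeleted();    // toXContent(...) would emit "acknowledged": true
--------------------------------------------------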
View File
@@ -971,6 +971,9 @@
         "total_fetch_time_millis": {
           "type": "long"
         },
+        "total_fetch_leader_time_millis": {
+          "type": "long"
+        },
         "number_of_successful_fetches": {
           "type": "long"
         },
View File
@ -5,103 +5,101 @@
*/ */
package org.elasticsearch.xpack.rollup.action; package org.elasticsearch.xpack.rollup.action;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.indexing.IndexerState;
import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction; import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;
import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus;
import org.elasticsearch.xpack.rollup.job.RollupJobTask;
import java.util.Objects; import java.io.IOException;
import java.util.concurrent.TimeUnit; import java.util.List;
public class TransportDeleteRollupJobAction public class TransportDeleteRollupJobAction extends TransportTasksAction<RollupJobTask, DeleteRollupJobAction.Request,
extends TransportMasterNodeAction<DeleteRollupJobAction.Request, AcknowledgedResponse> { DeleteRollupJobAction.Response, DeleteRollupJobAction.Response> {
private final PersistentTasksService persistentTasksService;
@Inject @Inject
public TransportDeleteRollupJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, public TransportDeleteRollupJobAction(Settings settings, TransportService transportService,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ActionFilters actionFilters, ClusterService clusterService) {
PersistentTasksService persistentTasksService, ClusterService clusterService) { super(settings, DeleteRollupJobAction.NAME, clusterService, transportService, actionFilters,
-        super(settings, DeleteRollupJobAction.NAME, transportService, clusterService, threadPool, actionFilters,
-            indexNameExpressionResolver, DeleteRollupJobAction.Request::new);
-        this.persistentTasksService = persistentTasksService;
+        super(settings, DeleteRollupJobAction.NAME, transportService, clusterService, threadPool, actionFilters,
+            DeleteRollupJobAction.Request::new, DeleteRollupJobAction.Response::new, ThreadPool.Names.SAME);
     }
 
     @Override
-    protected String executor() {
-        return ThreadPool.Names.SAME;
-    }
-
-    @Override
-    protected AcknowledgedResponse newResponse() {
-        return new AcknowledgedResponse();
-    }
-
-    @Override
-    protected void masterOperation(DeleteRollupJobAction.Request request, ClusterState state,
-                                   ActionListener<AcknowledgedResponse> listener) throws Exception {
-        String jobId = request.getId();
-        TimeValue timeout = new TimeValue(60, TimeUnit.SECONDS); // TODO make this a config option
-
-        // Step 1. Cancel the persistent task
-        persistentTasksService.sendRemoveRequest(jobId, new ActionListener<PersistentTasksCustomMetaData.PersistentTask<?>>() {
-            @Override
-            public void onResponse(PersistentTasksCustomMetaData.PersistentTask<?> persistentTask) {
-                logger.debug("Request to cancel Task for Rollup job [" + jobId + "] successful.");
-
-                // Step 2. Wait for the task to finish cancellation internally
-                persistentTasksService.waitForPersistentTaskCondition(jobId, Objects::isNull, timeout,
-                    new PersistentTasksService.WaitForPersistentTaskListener<RollupJob>() {
-                        @Override
-                        public void onResponse(PersistentTasksCustomMetaData.PersistentTask<RollupJob> task) {
-                            logger.debug("Task for Rollup job [" + jobId + "] successfully canceled.");
-                            listener.onResponse(new AcknowledgedResponse(true));
-                        }
-
-                        @Override
-                        public void onFailure(Exception e) {
-                            logger.error("Error while cancelling task for Rollup job [" + jobId
-                                + "]." + e);
-                            listener.onFailure(e);
-                        }
-
-                        @Override
-                        public void onTimeout(TimeValue timeout) {
-                            String msg = "Stopping of Rollup job [" + jobId + "] timed out after [" + timeout + "].";
-                            logger.warn(msg);
-                            listener.onFailure(new ElasticsearchException(msg));
-                        }
-                    });
-            }
-
-            @Override
-            public void onFailure(Exception e) {
-                logger.error("Error while requesting to cancel task for Rollup job [" + jobId
-                    + "]" + e);
-                listener.onFailure(e);
-            }
-        });
-    }
-
-    @Override
-    protected ClusterBlockException checkBlock(DeleteRollupJobAction.Request request, ClusterState state) {
-        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
+    protected void doExecute(Task task, DeleteRollupJobAction.Request request,
+                             ActionListener<DeleteRollupJobAction.Response> listener) {
+        final ClusterState state = clusterService.state();
+        final DiscoveryNodes nodes = state.nodes();
+
+        if (nodes.isLocalNodeElectedMaster()) {
+            PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
+            if (pTasksMeta != null && pTasksMeta.getTask(request.getId()) != null) {
+                super.doExecute(task, request, listener);
+            } else {
+                // If we couldn't find the job in the persistent task CS, it means it was deleted prior to this call,
+                // no need to go looking for the allocated task
+                listener.onFailure(new ResourceNotFoundException("the task with id [" + request.getId() + "] doesn't exist"));
+            }
+        } else {
+            // Delegates DeleteJob to elected master node, so it becomes the coordinating node.
+            // Non-master nodes may have a stale cluster state that shows jobs which are cancelled
+            // on the master, which makes testing difficult.
+            if (nodes.getMasterNode() == null) {
+                listener.onFailure(new MasterNotDiscoveredException("no known master nodes"));
+            } else {
+                transportService.sendRequest(nodes.getMasterNode(), actionName, request,
+                    new ActionListenerResponseHandler<>(listener, DeleteRollupJobAction.Response::new));
+            }
+        }
+    }
+
+    @Override
+    protected void taskOperation(DeleteRollupJobAction.Request request, RollupJobTask jobTask,
+                                 ActionListener<DeleteRollupJobAction.Response> listener) {
+        assert jobTask.getConfig().getId().equals(request.getId());
+        IndexerState state = ((RollupJobStatus) jobTask.getStatus()).getIndexerState();
+        if (state.equals(IndexerState.STOPPED)) {
+            jobTask.onCancelled();
+            listener.onResponse(new DeleteRollupJobAction.Response(true));
+        } else {
+            listener.onFailure(new IllegalStateException("Could not delete job [" + request.getId() + "] because " +
+                "indexer state is [" + state + "]. Job must be [" + IndexerState.STOPPED + "] before deletion."));
+        }
+    }
+
+    @Override
+    protected DeleteRollupJobAction.Response newResponse(DeleteRollupJobAction.Request request,
+                                                         List<DeleteRollupJobAction.Response> tasks,
+                                                         List<TaskOperationFailure> taskOperationFailures,
+                                                         List<FailedNodeException> failedNodeExceptions) {
+        // There should theoretically only be one task running the rollup job.
+        // If there are more, in production it should be ok as long as they all acknowledge shutting down.
+        // But in testing we'd like to know if there was more than one, hence the assert
+        assert tasks.size() + taskOperationFailures.size() == 1;
+        boolean cancelled = tasks.size() > 0 && tasks.stream().allMatch(DeleteRollupJobAction.Response::isDeleted);
+        return new DeleteRollupJobAction.Response(cancelled, taskOperationFailures, failedNodeExceptions);
+    }
+
+    @Override
+    protected DeleteRollupJobAction.Response readTaskResponse(StreamInput in) throws IOException {
+        DeleteRollupJobAction.Response response = new DeleteRollupJobAction.Response();
+        response.readFrom(in);
+        return response;
     }
 }
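
Taken together, the rewritten action first verifies on the elected master that the job's persistent task still exists, forwards the request to the master if a non-master node received it, and only then fans out to the node actually running the task. A minimal caller-side sketch, assuming only the x-pack core classes shown above; the wrapper class and method names are illustrative, not part of this commit:

----
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;

// Hypothetical helper, for illustration only.
public class DeleteRollupJobExample {
    // Routing to the elected master and on to the node running the task is
    // handled inside TransportDeleteRollupJobAction; the caller just executes
    // the action and waits for the aggregated response.
    static void deleteJob(Client client, String jobId,
                          ActionListener<DeleteRollupJobAction.Response> listener) {
        DeleteRollupJobAction.Request request = new DeleteRollupJobAction.Request(jobId);
        client.execute(DeleteRollupJobAction.INSTANCE, request, listener);
    }
}
----

Because the action is now a tasks action rather than a master-node action, per-task and per-node errors are aggregated into the single Response instead of being collapsed into one acknowledgement.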

View File

@@ -349,7 +349,7 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE
      * shut down from the inside.
      */
     @Override
-    protected synchronized void onCancelled() {
+    public synchronized void onCancelled() {
         logger.info("Received cancellation request for Rollup job [" + job.getConfig().getId() + "], state: [" + indexer.getState() + "]");
         if (indexer.abort()) {
             // there is no background job running, we can shutdown safely

View File

@@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.rest.action.RestToXContentListener;
 import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;
 import org.elasticsearch.xpack.rollup.Rollup;
@@ -31,7 +32,16 @@ public class RestDeleteRollupJobAction extends BaseRestHandler {
         String id = restRequest.param(ID.getPreferredName());
         DeleteRollupJobAction.Request request = new DeleteRollupJobAction.Request(id);
 
-        return channel -> client.execute(DeleteRollupJobAction.INSTANCE, request, new RestToXContentListener<>(channel));
+        return channel -> client.execute(DeleteRollupJobAction.INSTANCE, request,
+            new RestToXContentListener<DeleteRollupJobAction.Response>(channel) {
+                @Override
+                protected RestStatus getStatus(DeleteRollupJobAction.Response response) {
+                    if (response.getNodeFailures().size() > 0 || response.getTaskFailures().size() > 0) {
+                        return RestStatus.INTERNAL_SERVER_ERROR;
+                    }
+                    return RestStatus.OK;
+                }
+            });
     }
 
     @Override
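
The getStatus override above means a response carrying task or node failures is reported as HTTP 500 instead of a misleading 200. A standalone sketch of the same decision rule, with illustrative names (the real logic lives in the anonymous RestToXContentListener shown in the hunk):

----
import java.util.List;

// Hypothetical, self-contained restatement of the status mapping.
public class DeleteJobStatusMapping {
    // Any task or node failure downgrades the HTTP status from 200 to 500.
    static int httpStatus(List<?> taskFailures, List<?> nodeFailures) {
        return (taskFailures.isEmpty() && nodeFailures.isEmpty()) ? 200 : 500;
    }

    public static void main(String[] args) {
        System.out.println(httpStatus(List.of(), List.of()));       // 200
        System.out.println(httpStatus(List.of("boom"), List.of())); // 500
    }
}
----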

View File

@@ -1,4 +1,4 @@
-import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask
+import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
 
 apply plugin: 'elasticsearch.build'
 
@@ -26,7 +26,7 @@ if (project.inFipsJvm) {
   test.enabled = false
   // Forbidden APIs non-portable checks fail because bouncy castle classes are being used from the FIPS JDK, since those are
   // not part of the Java specification - all of this is as designed, so we have to relax this check for FIPS.
-  tasks.withType(ForbiddenApisCliTask) {
+  tasks.withType(CheckForbiddenApis) {
     bundledSignatures -= "jdk-non-portable"
   }
   // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit,

View File

@@ -46,9 +46,9 @@ import static org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResol
 
 class IndicesAndAliasesResolver {
 
-    //`*,-*` what we replace indices with if we need Elasticsearch to return empty responses without throwing exception
-    private static final String[] NO_INDICES_ARRAY = new String[] { "*", "-*" };
-    static final List<String> NO_INDICES_LIST = Arrays.asList(NO_INDICES_ARRAY);
+    //`*,-*` what we replace indices and aliases with if we need Elasticsearch to return empty responses without throwing exception
+    static final String[] NO_INDICES_OR_ALIASES_ARRAY = new String[] { "*", "-*" };
+    static final List<String> NO_INDICES_OR_ALIASES_LIST = Arrays.asList(NO_INDICES_OR_ALIASES_ARRAY);
 
     private final IndexNameExpressionResolver nameExpressionResolver;
     private final RemoteClusterResolver remoteClusterResolver;
@@ -165,7 +165,7 @@ class IndicesAndAliasesResolver {
             //this is how we tell es core to return an empty response, we can let the request through being sure
             //that the '-*' wildcard expression will be resolved to no indices. We can't let empty indices through
             //as that would be resolved to _all by es core.
-            replaceable.indices(NO_INDICES_ARRAY);
+            replaceable.indices(NO_INDICES_OR_ALIASES_ARRAY);
             indicesReplacedWithNoIndices = true;
             resolvedIndicesBuilder.addLocal(NO_INDEX_PLACEHOLDER);
         } else {
@@ -176,8 +176,6 @@ class IndicesAndAliasesResolver {
             }
         } else {
             if (containsWildcards(indicesRequest)) {
-                //an alias can still contain '*' in its name as of 5.0. Such aliases cannot be referred to when using
-                //the security plugin, otherwise the following exception gets thrown
                 throw new IllegalStateException("There are no external requests known to support wildcards that don't support replacing " +
                     "their indices");
             }
@@ -198,8 +196,6 @@ class IndicesAndAliasesResolver {
             if (aliasesRequest.expandAliasesWildcards()) {
                 List<String> aliases = replaceWildcardsWithAuthorizedAliases(aliasesRequest.aliases(),
                     loadAuthorizedAliases(authorizedIndices.get(), metaData));
-                //it may be that we replace aliases with an empty array, in case there are no authorized aliases for the action.
-                //MetaData#findAliases will return nothing when some alias was originally requested, which was replaced with empty.
                 aliasesRequest.replaceAliases(aliases.toArray(new String[aliases.size()]));
             }
             if (indicesReplacedWithNoIndices) {
@@ -213,6 +209,13 @@ class IndicesAndAliasesResolver {
             } else {
                 resolvedIndicesBuilder.addLocal(aliasesRequest.aliases());
             }
+            // if no aliases are authorized, then fill in an expression that
+            // MetaData#findAliases evaluates to the empty alias list. You cannot put
+            // "nothing" (the empty list) explicitly because this is resolved by es core to
+            // _all
+            if (aliasesRequest.aliases().length == 0) {
+                aliasesRequest.replaceAliases(NO_INDICES_OR_ALIASES_ARRAY);
+            }
         }
         return resolvedIndicesBuilder.build();
     }
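
The `*,-*` placeholder works because wildcard expressions are resolved left to right: `*` first selects every name, then `-*` subtracts every name, leaving an empty result without triggering the `_all` fallback that a literally empty list would. A toy simulation of that resolution order, assuming nothing from Elasticsearch (this is not the real IndexNameExpressionResolver):

----
import java.util.LinkedHashSet;
import java.util.Set;

// Toy model of left-to-right wildcard resolution, for illustration only.
public class WildcardPlaceholderDemo {
    static Set<String> resolve(Set<String> allNames, String... expressions) {
        Set<String> result = new LinkedHashSet<>();
        for (String expr : expressions) {
            if (expr.equals("*")) {
                result.addAll(allNames);      // "*" selects everything
            } else if (expr.equals("-*")) {
                result.clear();               // "-*" subtracts everything
            } else if (expr.startsWith("-")) {
                result.remove(expr.substring(1));
            } else {
                result.add(expr);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        Set<String> names = Set.of("logs-1", "logs-2", ".security");
        // An empty expression list would be treated as _all by es core, so
        // security substitutes "*","-*" to force an empty result instead.
        System.out.println(resolve(names, "*", "-*")); // []
    }
}
----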

View File

@@ -818,7 +818,7 @@ public class AuthorizationServiceTests extends ESTestCase {
         final SearchRequest searchRequest = new SearchRequest("_all");
         authorize(authentication, SearchAction.NAME, searchRequest);
         assertEquals(2, searchRequest.indices().length);
-        assertEquals(IndicesAndAliasesResolver.NO_INDICES_LIST, Arrays.asList(searchRequest.indices()));
+        assertEquals(IndicesAndAliasesResolver.NO_INDICES_OR_ALIASES_LIST, Arrays.asList(searchRequest.indices()));
     }
 
     public void testGrantedNonXPackUserCanExecuteMonitoringOperationsAgainstSecurityIndex() {

Some files were not shown because too many files have changed in this diff.