Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-03-27 10:28:28 +00:00)

Commit b20497560c: Merge remote-tracking branch 'elastic/master' into zen2
@@ -33,7 +33,7 @@ dependencies {
        exclude group: 'net.sf.jopt-simple', module: 'jopt-simple'
    }
    compile "org.openjdk.jmh:jmh-core:$versions.jmh"
-    compile "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh"
+    annotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh"
    // Dependencies of JMH
    runtime 'net.sf.jopt-simple:jopt-simple:4.6'
    runtime 'org.apache.commons:commons-math3:3.2'
@@ -24,13 +24,16 @@ import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionCollection
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.gradle.api.tasks.options.Option
import org.gradle.util.GradleVersion
import org.gradle.util.DistributionLocator
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import com.carrotsearch.gradle.junit4.RandomizedTestingTask

import java.util.function.Predicate

plugins {
-  id 'com.gradle.build-scan' version '1.13.2'
+  id 'com.gradle.build-scan' version '2.0.2'
  id 'base'
}

if (properties.get("org.elasticsearch.acceptScanTOS", "false") == "true") {
@@ -527,7 +530,7 @@ allprojects {
class Run extends DefaultTask {
  boolean debug = false

-  @org.gradle.api.internal.tasks.options.Option(
+  @Option(
    option = "debug-jvm",
    description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
  )
@@ -147,10 +147,8 @@ if (project == rootProject) {
      mavenLocal()
    }
  }
  test {
    include "**/*Tests.class"
    exclude "**/*IT.class"
  }
  // only run tests as build-tools
  test.enabled = false
}

/*****************************************************************************
@@ -180,9 +178,17 @@ if (project != rootProject) {
  jarHell.enabled = false
  thirdPartyAudit.enabled = false

  // tests can't be run with randomized test runner
  // it's fine as we run them as part of :buildSrc
  test.enabled = false
  test {
    include "**/*Tests.class"
    exclude "**/*IT.class"
    // The test task is configured to runtimeJava version, but we build-tools doesn't support all of them, so test
    // with compiler instead on the ones that are too old.
    if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_10) {
      jvm = "${project.compilerJavaHome}/bin/java"
    }
  }

  // This can't be an RandomizedTestingTask because we can't yet reference it
  task integTest(type: Test) {
    // integration test requires the local testing repo for example plugin builds
    dependsOn project.rootProject.allprojects.collect {
@@ -125,7 +125,7 @@ class TestProgressLogger implements AggregatedEventListener {

    @Subscribe
    void onTestResult(AggregatedTestResultEvent e) throws IOException {
-        final String statusMessage
+        String statusMessage
        testsCompleted++
        switch (e.status) {
            case ERROR:
@@ -69,7 +69,7 @@ class BuildPlugin implements Plugin<Project> {
                    + 'elasticsearch.standalone-rest-test, and elasticsearch.build '
                    + 'are mutually exclusive')
        }
-        final String minimumGradleVersion
+        String minimumGradleVersion = null
        InputStream is = getClass().getResourceAsStream("/minimumGradleVersion")
        try { minimumGradleVersion = IOUtils.toString(is, StandardCharsets.UTF_8.toString()) } finally { is.close() }
        if (GradleVersion.current() < GradleVersion.version(minimumGradleVersion.trim())) {
@@ -233,7 +233,7 @@ class BuildPlugin implements Plugin<Project> {
    }

    private static String findCompilerJavaHome() {
-        final String compilerJavaHome = System.getenv('JAVA_HOME')
+        String compilerJavaHome = System.getenv('JAVA_HOME')
        final String compilerJavaProperty = System.getProperty('compiler.java')
        if (compilerJavaProperty != null) {
            compilerJavaHome = findJavaHome(compilerJavaProperty)
@@ -770,13 +770,26 @@ class BuildPlugin implements Plugin<Project> {
    }

    static void applyCommonTestConfig(Project project) {
-        project.tasks.withType(RandomizedTestingTask) {
+        project.tasks.withType(RandomizedTestingTask) {task ->
            jvm "${project.runtimeJavaHome}/bin/java"
            parallelism System.getProperty('tests.jvms', project.rootProject.ext.defaultParallel)
            ifNoTests System.getProperty('tests.ifNoTests', 'fail')
            onNonEmptyWorkDirectory 'wipe'
            leaveTemporary true

            // Make sure all test tasks are configured properly
            if (name != "test") {
                project.tasks.matching { it.name == "test"}.all { testTask ->
                    task.testClassesDirs = testTask.testClassesDirs
                    task.classpath = testTask.classpath
                    task.shouldRunAfter testTask
                }
            }
            // no loose ends: check has to depend on all test tasks
            project.tasks.matching {it.name == "check"}.all {
                dependsOn(task)
            }

            // TODO: why are we not passing maxmemory to junit4?
            jvmArg '-Xmx' + System.getProperty('tests.heap.size', '512m')
            jvmArg '-Xms' + System.getProperty('tests.heap.size', '512m')
@@ -129,7 +129,6 @@ public class PluginBuildPlugin extends BuildPlugin {
        RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
        integTest.mustRunAfter(project.precommit, project.test)
        project.integTestCluster.distribution = System.getProperty('tests.distribution', 'integ-test-zip')
        project.check.dependsOn(integTest)
    }

    /**
@@ -190,8 +190,7 @@ class PrecommitTasks {
        Task checkstyleTask = project.tasks.create('checkstyle')
        // Apply the checkstyle plugin to create `checkstyleMain` and `checkstyleTest`. It only
        // creates them if there is main or test code to check and it makes `check` depend
-        // on them. But we want `precommit` to depend on `checkstyle` which depends on them so
-        // we have to swap them.
+        // on them. We also want `precommit` to depend on `checkstyle`.
        project.pluginManager.apply('checkstyle')
        project.checkstyle {
            config = project.resources.text.fromFile(checkstyleConf, 'UTF-8')
@@ -202,7 +201,6 @@ class PrecommitTasks {
        }

        project.tasks.withType(Checkstyle) { task ->
            project.tasks[JavaBasePlugin.CHECK_TASK_NAME].dependsOn.remove(task)
            checkstyleTask.dependsOn(task)
            task.dependsOn(copyCheckstyleConf)
            task.inputs.file(checkstyleSuppressions)
@@ -111,8 +111,8 @@ class ClusterFormationTasks {
        for (int i = 0; i < config.numNodes; i++) {
            // we start N nodes and out of these N nodes there might be M bwc nodes.
            // for each of those nodes we might have a different configuration
-            final Configuration distro
-            final String elasticsearchVersion
+            Configuration distro
+            String elasticsearchVersion
            if (i < config.numBwcNodes) {
                elasticsearchVersion = config.bwcVersion.toString()
                if (project.bwcVersions.unreleased.contains(config.bwcVersion)) {
@@ -595,7 +595,7 @@ class ClusterFormationTasks {
    }

    static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, String pluginName, String prefix) {
-        final FileCollection pluginZip;
+        FileCollection pluginZip;
        if (node.nodeVersion != Version.fromString(VersionProperties.elasticsearch)) {
            pluginZip = project.configurations.getByName(pluginBwcConfigurationName(prefix, pluginName))
        } else {
@@ -25,12 +25,12 @@ import org.gradle.api.DefaultTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.execution.TaskExecutionAdapter
import org.gradle.api.internal.tasks.options.Option
import org.gradle.api.provider.Property
import org.gradle.api.provider.Provider
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskState
import org.gradle.api.tasks.options.Option
import org.gradle.plugins.ide.idea.IdeaPlugin

import java.nio.charset.StandardCharsets
@@ -53,7 +53,7 @@ public class RestIntegTestTask extends DefaultTask {

    /** Flag indicating whether the rest tests in the rest spec should be run. */
    @Input
-    Property<Boolean> includePackaged = project.objects.property(Boolean)
+    Boolean includePackaged = false

    public RestIntegTestTask() {
        runner = project.tasks.create("${name}Runner", RandomizedTestingTask.class)
@@ -109,7 +109,7 @@ public class RestIntegTestTask extends DefaultTask {
    }

    // copy the rest spec/tests into the test resources
-    Task copyRestSpec = createCopyRestSpecTask(project, includePackaged)
+    Task copyRestSpec = createCopyRestSpecTask()
    runner.dependsOn(copyRestSpec)

    // this must run after all projects have been configured, so we know any project
@@ -130,7 +130,7 @@ public class RestIntegTestTask extends DefaultTask {

    /** Sets the includePackaged property */
    public void includePackaged(boolean include) {
-        includePackaged.set(include)
+        includePackaged = include
    }

    @Option(
@@ -215,7 +215,7 @@ public class RestIntegTestTask extends DefaultTask {
     * @param project The project to add the copy task to
     * @param includePackagedTests true if the packaged tests should be copied, false otherwise
     */
-    static Task createCopyRestSpecTask(Project project, Provider<Boolean> includePackagedTests) {
+    Task createCopyRestSpecTask() {
        project.configurations {
            restSpec
        }
@@ -237,7 +237,7 @@ public class RestIntegTestTask extends DefaultTask {
        project.afterEvaluate {
            copyRestSpec.from({ project.zipTree(project.configurations.restSpec.singleFile) }) {
                include 'rest-api-spec/api/**'
-                if (includePackagedTests.get()) {
+                if (includePackaged) {
                    include 'rest-api-spec/test/**'
                }
            }
@@ -2,7 +2,7 @@ package org.elasticsearch.gradle.test

import org.gradle.api.DefaultTask
import org.gradle.api.Task
-import org.gradle.api.internal.tasks.options.Option
+import org.gradle.api.tasks.options.Option
import org.gradle.util.ConfigureUtil

public class RunTask extends DefaultTask {
@@ -464,9 +464,9 @@ class VagrantTestPlugin implements Plugin<Project> {
     * execution.
     */
    final String vagrantDestroyProperty = project.getProperties().get('vagrant.destroy', 'true')
-    final boolean vagrantDestroy
+    boolean vagrantDestroy
    if ("true".equals(vagrantDestroyProperty)) {
-        vagrantDestroy = true;
+        vagrantDestroy = true
    } else if ("false".equals(vagrantDestroyProperty)) {
        vagrantDestroy = false
    } else {
@@ -56,9 +56,8 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
    private DirectoryProperty outputDir;

    public ExportElasticsearchBuildResourcesTask() {
-        outputDir = getProject().getLayout().directoryProperty(
-            getProject().getLayout().getBuildDirectory().dir("build-tools-exported")
-        );
+        outputDir = getProject().getObjects().directoryProperty();
+        outputDir.set(new File(getProject().getBuildDir(), "build-tools-exported"));
    }

    @OutputDirectory
@ -20,15 +20,20 @@ package org.elasticsearch.gradle.precommit;
|
||||
|
||||
import org.elasticsearch.gradle.tool.Boilerplate;
|
||||
import org.gradle.api.DefaultTask;
|
||||
import org.gradle.api.Task;
|
||||
import org.gradle.api.file.FileCollection;
|
||||
import org.gradle.api.file.FileTree;
|
||||
import org.gradle.api.tasks.Input;
|
||||
import org.gradle.api.tasks.OutputFile;
|
||||
import org.gradle.api.tasks.SkipWhenEmpty;
|
||||
import org.gradle.api.tasks.TaskAction;
|
||||
import org.gradle.api.tasks.testing.Test;
|
||||
import org.gradle.api.tasks.util.PatternFilterable;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.net.MalformedURLException;
|
||||
@ -40,8 +45,9 @@ import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.nio.file.attribute.BasicFileAttributes;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
@ -57,7 +63,7 @@ public class TestingConventionsTasks extends DefaultTask {
|
||||
*/
|
||||
private Boolean activeTestsExists;
|
||||
|
||||
private List<String> testClassNames;
|
||||
private Map<String, File> testClassNames;
|
||||
|
||||
public TestingConventionsTasks() {
|
||||
setDescription("Tests various testing conventions");
|
||||
@ -68,56 +74,155 @@ public class TestingConventionsTasks extends DefaultTask {
|
||||
@TaskAction
|
||||
public void doCheck() throws IOException {
|
||||
activeTestsExists = false;
|
||||
final List<String> problems;
|
||||
final String problems;
|
||||
|
||||
try (URLClassLoader isolatedClassLoader = new URLClassLoader(
|
||||
getTestsClassPath().getFiles().stream().map(this::fileToUrl).toArray(URL[]::new)
|
||||
)) {
|
||||
List<? extends Class<?>> classes = getTestClassNames().stream()
|
||||
.map(name -> loadClassWithoutInitializing(name, isolatedClassLoader))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
Predicate<Class<?>> isStaticClass = clazz -> Modifier.isStatic(clazz.getModifiers());
|
||||
Predicate<Class<?>> isPublicClass = clazz -> Modifier.isPublic(clazz.getModifiers());
|
||||
Predicate<Class<?>> implementsNamingConvention = clazz -> clazz.getName().endsWith(TEST_CLASS_SUFIX) ||
|
||||
clazz.getName().endsWith(INTEG_TEST_CLASS_SUFIX);
|
||||
Predicate<Class<?>> implementsNamingConvention = clazz ->
|
||||
clazz.getName().endsWith(TEST_CLASS_SUFIX) ||
|
||||
clazz.getName().endsWith(INTEG_TEST_CLASS_SUFIX);
|
||||
|
||||
problems = Stream.concat(
|
||||
Map<File, ? extends Class<?>> classes = getTestClassNames().entrySet().stream()
|
||||
.collect(Collectors.toMap(
|
||||
Map.Entry::getValue,
|
||||
entry -> loadClassWithoutInitializing(entry.getKey(), isolatedClassLoader))
|
||||
);
|
||||
|
||||
FileTree allTestClassFiles = getProject().files(
|
||||
classes.values().stream()
|
||||
.filter(isStaticClass.negate())
|
||||
.filter(isPublicClass)
|
||||
.filter(implementsNamingConvention)
|
||||
.map(clazz -> testClassNames.get(clazz.getName()))
|
||||
.collect(Collectors.toList())
|
||||
).getAsFileTree();
|
||||
|
||||
final Map<String, Set<File>> classFilesPerRandomizedTestingTask = classFilesPerRandomizedTestingTask(allTestClassFiles);
|
||||
final Map<String, Set<File>> classFilesPerGradleTestTask = classFilesPerGradleTestTask();
|
||||
|
||||
problems = collectProblems(
|
||||
checkNoneExists(
|
||||
"Test classes implemented by inner classes will not run",
|
||||
classes.stream()
|
||||
classes.values().stream()
|
||||
.filter(isStaticClass)
|
||||
.filter(implementsNamingConvention.or(this::seemsLikeATest))
|
||||
).stream(),
|
||||
),
|
||||
checkNoneExists(
|
||||
"Seem like test classes but don't match naming convention",
|
||||
classes.stream()
|
||||
classes.values().stream()
|
||||
.filter(isStaticClass.negate())
|
||||
.filter(isPublicClass)
|
||||
.filter(this::seemsLikeATest)
|
||||
.filter(implementsNamingConvention.negate())
|
||||
).stream()
|
||||
).collect(Collectors.toList());
|
||||
),
|
||||
checkNoneExists(
|
||||
"Test classes are not included in any enabled task (" +
|
||||
Stream.concat(
|
||||
classFilesPerRandomizedTestingTask.keySet().stream(),
|
||||
classFilesPerGradleTestTask.keySet().stream()
|
||||
).collect(Collectors.joining(",")) + ")",
|
||||
allTestClassFiles.getFiles().stream()
|
||||
.filter(testFile ->
|
||||
classFilesPerRandomizedTestingTask.values().stream()
|
||||
.anyMatch(fileSet -> fileSet.contains(testFile)) == false &&
|
||||
classFilesPerGradleTestTask.values().stream()
|
||||
.anyMatch(fileSet -> fileSet.contains(testFile)) == false
|
||||
)
|
||||
.map(classes::get)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
if (problems.isEmpty()) {
|
||||
getLogger().error(problems);
|
||||
throw new IllegalStateException("Testing conventions are not honored");
|
||||
} else {
|
||||
getSuccessMarker().getParentFile().mkdirs();
|
||||
Files.write(getSuccessMarker().toPath(), new byte[]{}, StandardOpenOption.CREATE);
|
||||
} else {
|
||||
problems.forEach(getProject().getLogger()::error);
|
||||
throw new IllegalStateException("Testing conventions are not honored");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private String collectProblems(String... problems) {
|
||||
return Stream.of(problems)
|
||||
.map(String::trim)
|
||||
.filter(String::isEmpty)
|
||||
.map(each -> each + "\n")
|
||||
.collect(Collectors.joining());
|
||||
}
|
||||
|
||||
|
||||
@Input
|
||||
public Map<String, Set<File>> classFilesPerRandomizedTestingTask(FileTree testClassFiles) {
|
||||
return
|
||||
Stream.concat(
|
||||
getProject().getTasks().withType(getRandomizedTestingTask()).stream(),
|
||||
// Look at sub-projects too. As sometimes tests are implemented in parent but ran in sub-projects against
|
||||
// different configurations
|
||||
getProject().getSubprojects().stream().flatMap(subproject ->
|
||||
subproject.getTasks().withType(getRandomizedTestingTask()).stream()
|
||||
)
|
||||
)
|
||||
.filter(Task::getEnabled)
|
||||
.collect(Collectors.toMap(
|
||||
Task::getPath,
|
||||
task -> testClassFiles.matching(getRandomizedTestingPatternSet(task)).getFiles()
|
||||
));
|
||||
}
|
||||
|
||||
@Input
|
||||
public Map<String, Set<File>> classFilesPerGradleTestTask() {
|
||||
return Stream.concat(
|
||||
getProject().getTasks().withType(Test.class).stream(),
|
||||
getProject().getSubprojects().stream().flatMap(subproject ->
|
||||
subproject.getTasks().withType(Test.class).stream()
|
||||
)
|
||||
)
|
||||
.filter(Task::getEnabled)
|
||||
.collect(Collectors.toMap(
|
||||
Task::getPath,
|
||||
task -> task.getCandidateClassFiles().getFiles()
|
||||
));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private PatternFilterable getRandomizedTestingPatternSet(Task task) {
|
||||
try {
|
||||
if (
|
||||
getRandomizedTestingTask().isAssignableFrom(task.getClass()) == false
|
||||
) {
|
||||
throw new IllegalStateException("Expected " + task + " to be RandomizedTestingTask or Test but it was " + task.getClass());
|
||||
}
|
||||
Method getPatternSet = task.getClass().getMethod("getPatternSet");
|
||||
return (PatternFilterable) getPatternSet.invoke(task);
|
||||
} catch (NoSuchMethodException e) {
|
||||
throw new IllegalStateException("Expecte task to have a `patternSet` " + task, e);
|
||||
} catch (IllegalAccessException | InvocationTargetException e) {
|
||||
throw new IllegalStateException("Failed to get pattern set from task" + task, e);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Class<? extends Task> getRandomizedTestingTask() {
|
||||
try {
|
||||
return (Class<? extends Task>) Class.forName("com.carrotsearch.gradle.junit4.RandomizedTestingTask");
|
||||
} catch (ClassNotFoundException | ClassCastException e) {
|
||||
throw new IllegalStateException("Failed to load randomized testing class", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Input
|
||||
@SkipWhenEmpty
|
||||
public List<String> getTestClassNames() {
|
||||
public Map<String, File> getTestClassNames() {
|
||||
if (testClassNames == null) {
|
||||
testClassNames = Boilerplate.getJavaSourceSets(getProject()).getByName("test").getOutput().getClassesDirs()
|
||||
.getFiles().stream()
|
||||
.filter(File::exists)
|
||||
.flatMap(testRoot -> walkPathAndLoadClasses(testRoot).stream())
|
||||
.collect(Collectors.toList());
|
||||
.flatMap(testRoot -> walkPathAndLoadClasses(testRoot).entrySet().stream())
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
|
||||
}
|
||||
return testClassNames;
|
||||
}
|
||||
@ -127,16 +232,15 @@ public class TestingConventionsTasks extends DefaultTask {
|
||||
return new File(getProject().getBuildDir(), "markers/" + getName());
|
||||
}
|
||||
|
||||
private List<String> checkNoneExists(String message, Stream<? extends Class<?>> stream) {
|
||||
List<String> problems = new ArrayList<>();
|
||||
List<Class<?>> entries = stream.collect(Collectors.toList());
|
||||
if (entries.isEmpty() == false) {
|
||||
problems.add(message + ":");
|
||||
entries.stream()
|
||||
.map(each -> " * " + each.getName())
|
||||
.forEach(problems::add);
|
||||
private String checkNoneExists(String message, Stream<? extends Class<?>> stream) {
|
||||
String problem = stream
|
||||
.map(each -> " * " + each.getName())
|
||||
.collect(Collectors.joining("\n"));
|
||||
if (problem.isEmpty() == false) {
|
||||
return message + ":\n" + problem;
|
||||
} else{
|
||||
return "";
|
||||
}
|
||||
return problems;
|
||||
}
|
||||
|
||||
private boolean seemsLikeATest(Class<?> clazz) {
|
||||
@ -197,8 +301,8 @@ public class TestingConventionsTasks extends DefaultTask {
|
||||
);
|
||||
}
|
||||
|
||||
private List<String> walkPathAndLoadClasses(File testRoot) {
|
||||
List<String> classes = new ArrayList<>();
|
||||
private Map<String, File> walkPathAndLoadClasses(File testRoot) {
|
||||
Map<String, File> classes = new HashMap<>();
|
||||
try {
|
||||
Files.walkFileTree(testRoot.toPath(), new FileVisitor<Path>() {
|
||||
private String packageName;
|
||||
@ -227,7 +331,7 @@ public class TestingConventionsTasks extends DefaultTask {
|
||||
String filename = file.getFileName().toString();
|
||||
if (filename.endsWith(".class")) {
|
||||
String className = filename.substring(0, filename.length() - ".class".length());
|
||||
classes.add(packageName + className);
|
||||
classes.put(packageName + className, file.toFile());
|
||||
}
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
|
@@ -1 +1 @@
-4.10
+5.0
@@ -19,7 +19,6 @@
package org.elasticsearch.gradle.precommit;

import java.io.File;
-import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.util.List;
@@ -32,12 +31,8 @@ import org.gradle.api.Project;
import org.gradle.api.plugins.JavaPlugin;
import org.gradle.testfixtures.ProjectBuilder;
import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.rules.TemporaryFolder;

public class FilePermissionsTaskTests extends GradleUnitTestCase {
-    @Rule
-    public TemporaryFolder temporaryFolder = new TemporaryFolder();

    public void testCheckPermissionsWhenAnExecutableFileExists() throws Exception {
        RandomizedTest.assumeFalse("Functionality is Unix specific", Os.isFamily(Os.FAMILY_WINDOWS));
@@ -93,11 +88,10 @@ public class FilePermissionsTaskTests extends GradleUnitTestCase {
        assertEquals("done", result.get(0));

        file.delete();

    }

-    private Project createProject() throws IOException {
-        Project project = ProjectBuilder.builder().withProjectDir(temporaryFolder.newFolder()).build();
+    private Project createProject() {
+        Project project = ProjectBuilder.builder().build();
        project.getPlugins().apply(JavaPlugin.class);
        return project;
    }
@@ -105,4 +99,5 @@ public class FilePermissionsTaskTests extends GradleUnitTestCase {
    private FilePermissionsTask createTask(Project project) {
        return project.getTasks().create("filePermissionsTask", FilePermissionsTask.class);
    }

}
buildSrc/src/testKit/jarHell/settings.gradle (new file, 0 lines)
buildSrc/src/testKit/testclusters/settings.gradle (new file, 0 lines)
@@ -1,5 +1,5 @@
elasticsearch = 7.0.0
-lucene = 8.0.0-snapshot-67cdd21996
+lucene = 8.0.0-snapshot-c78429a554

# optional dependencies
spatial4j = 0.7
@@ -36,9 +36,17 @@ publishing {
    }
}

//we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions)
Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE)
test.dependsOn(copyRestSpec)
configurations {
    restSpec
}

idea {
    module {
        if (scopes.TEST != null) {
            scopes.TEST.plus.add(project.configurations.restSpec)
        }
    }
}

dependencies {
    /*
@@ -59,6 +67,16 @@ dependencies {
    testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
    //this is needed to make RestHighLevelClientTests#testApiNamingConventions work from IDEs
    testCompile "org.elasticsearch:rest-api-spec:${version}"

    restSpec "org.elasticsearch:rest-api-spec:${version}"
}

//we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions)
processTestResources {
dependsOn jar // so that configurations resolve
    from({ zipTree(configurations.restSpec.singleFile) }) {
        include 'rest-api-spec/api/**'
    }
}

dependencyLicenses {
@@ -20,6 +20,8 @@
package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.client.ccr.CcrStatsRequest;
+import org.elasticsearch.client.ccr.CcrStatsResponse;
import org.elasticsearch.client.ccr.DeleteAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.GetAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.GetAutoFollowPatternResponse;
@@ -360,4 +362,48 @@ public final class CcrClient {
        );
    }

+    /**
+     * Gets all CCR stats.
+     *
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-get-stats.html">
+     * the docs</a> for more.
+     *
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public CcrStatsResponse getCcrStats(CcrStatsRequest request,
+                                        RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(
+            request,
+            CcrRequestConverters::getCcrStats,
+            options,
+            CcrStatsResponse::fromXContent,
+            Collections.emptySet()
+        );
+    }
+
+    /**
+     * Gets all CCR stats.
+     *
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-get-stats.html">
+     * the docs</a> for more.
+     *
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     */
+    public void getCcrStatsAsync(CcrStatsRequest request,
+                                 RequestOptions options,
+                                 ActionListener<CcrStatsResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(
+            request,
+            CcrRequestConverters::getCcrStats,
+            options,
+            CcrStatsResponse::fromXContent,
+            listener,
+            Collections.emptySet()
+        );
+    }
+
}
@@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.ccr.CcrStatsRequest;
import org.elasticsearch.client.ccr.DeleteAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.GetAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.PauseFollowRequest;
@@ -100,4 +101,11 @@ final class CcrRequestConverters {
        return new Request(HttpGet.METHOD_NAME, endpoint);
    }

+    static Request getCcrStats(CcrStatsRequest ccrStatsRequest) {
+        String endpoint = new RequestConverters.EndpointBuilder()
+            .addPathPartAsIs("_ccr", "stats")
+            .build();
+        return new Request(HttpGet.METHOD_NAME, endpoint);
+    }
+
}
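For context, a minimal caller-side sketch of the stats API added above (an illustrative snippet, not part of this commit; it assumes an already-configured RestHighLevelClient named client and that the CCR client is reached via client.ccr()):

    import java.io.IOException;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.ccr.CcrStatsRequest;
    import org.elasticsearch.client.ccr.CcrStatsResponse;

    public class CcrStatsExample {
        // Fetch the CCR stats and read one of the auto-follow counters parsed by AutoFollowStats.
        static long successfulFollows(RestHighLevelClient client) throws IOException {
            // client.ccr() is assumed here to expose the CcrClient shown above.
            CcrStatsResponse response = client.ccr().getCcrStats(new CcrStatsRequest(), RequestOptions.DEFAULT);
            return response.getAutoFollowStats().getNumberOfSuccessfulFollowIndices();
        }
    }

The request carries no parameters (CcrStatsRequest is an empty Validatable), so the call maps directly onto the GET _ccr/stats endpoint built by CcrRequestConverters.getCcrStats.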
@@ -26,6 +26,8 @@ import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchResponse;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.client.watcher.AckWatchResponse;
+import org.elasticsearch.client.watcher.ExecuteWatchRequest;
+import org.elasticsearch.client.watcher.ExecuteWatchResponse;
import org.elasticsearch.client.watcher.GetWatchRequest;
import org.elasticsearch.client.watcher.GetWatchResponse;
import org.elasticsearch.client.watcher.StartWatchServiceRequest;
@@ -269,6 +271,33 @@ public final class WatcherClient {
            ActivateWatchResponse::fromXContent, listener, singleton(404));
    }

+    /**
+     * Execute a watch on the cluster
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-execute-watch.html">
+     * the docs</a> for more.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException if there is a problem sending the request or parsing the response
+     */
+    public ExecuteWatchResponse executeWatch(ExecuteWatchRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::executeWatch, options,
+            ExecuteWatchResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Asynchronously execute a watch on the cluster
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-execute-watch.html">
+     * the docs</a> for more.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notifed upon request completion
+     */
+    public void executeWatchAsync(ExecuteWatchRequest request, RequestOptions options, ActionListener<ExecuteWatchResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::executeWatch, options,
+            ExecuteWatchResponse::fromXContent, listener, emptySet());
+    }
+
    /**
     * Get the watcher stats
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-stats.html">
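Similarly, a minimal sketch of invoking the new execute-watch API (illustrative only; it assumes the watcher client is reached via client.watcher() and that the caller already holds an ExecuteWatchRequest for an existing watch):

    import java.io.IOException;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.watcher.ExecuteWatchRequest;
    import org.elasticsearch.client.watcher.ExecuteWatchResponse;

    public class ExecuteWatchExample {
        // Synchronous execution; executeWatchAsync takes an ActionListener<ExecuteWatchResponse> instead of returning.
        static ExecuteWatchResponse run(RestHighLevelClient client, ExecuteWatchRequest request) throws IOException {
            // client.watcher() is assumed here to expose the WatcherClient shown above.
            return client.watcher().executeWatch(request, RequestOptions.DEFAULT);
        }
    }

The debug, ignore_condition and record_execution flags on the request translate into the corresponding query parameters built by WatcherRequestConverters.executeWatch below.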
@ -25,16 +25,20 @@ import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.apache.http.entity.ByteArrayEntity;
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.elasticsearch.client.watcher.DeactivateWatchRequest;
|
||||
import org.elasticsearch.client.watcher.ActivateWatchRequest;
|
||||
import org.elasticsearch.client.watcher.AckWatchRequest;
|
||||
import org.elasticsearch.client.watcher.ActivateWatchRequest;
|
||||
import org.elasticsearch.client.watcher.DeactivateWatchRequest;
|
||||
import org.elasticsearch.client.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.client.watcher.ExecuteWatchRequest;
|
||||
import org.elasticsearch.client.watcher.GetWatchRequest;
|
||||
import org.elasticsearch.client.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.client.watcher.StartWatchServiceRequest;
|
||||
import org.elasticsearch.client.watcher.StopWatchServiceRequest;
|
||||
import org.elasticsearch.client.watcher.WatcherStatsRequest;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
final class WatcherRequestConverters {
|
||||
|
||||
@ -108,6 +112,28 @@ final class WatcherRequestConverters {
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request executeWatch(ExecuteWatchRequest executeWatchRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack", "watcher", "watch")
|
||||
.addPathPart(executeWatchRequest.getId()) // will ignore if ID is null
|
||||
.addPathPartAsIs("_execute").build();
|
||||
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||
if (executeWatchRequest.isDebug()) {
|
||||
params.putParam("debug", "true");
|
||||
}
|
||||
if (executeWatchRequest.ignoreCondition()) {
|
||||
params.putParam("ignore_condition", "true");
|
||||
}
|
||||
if (executeWatchRequest.recordExecution()) {
|
||||
params.putParam("record_execution", "true");
|
||||
}
|
||||
|
||||
request.setEntity(RequestConverters.createEntity(executeWatchRequest, XContentType.JSON));
|
||||
return request;
|
||||
}
|
||||
|
||||
public static Request ackWatch(AckWatchRequest ackWatchRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack", "watcher", "watch")
|
||||
|
@ -0,0 +1,105 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ccr;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
|
||||
import java.util.AbstractMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NavigableMap;
|
||||
import java.util.TreeMap;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public final class AutoFollowStats {
|
||||
|
||||
static final ParseField NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED = new ParseField("number_of_successful_follow_indices");
|
||||
static final ParseField NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED = new ParseField("number_of_failed_follow_indices");
|
||||
static final ParseField NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS =
|
||||
new ParseField("number_of_failed_remote_cluster_state_requests");
|
||||
static final ParseField RECENT_AUTO_FOLLOW_ERRORS = new ParseField("recent_auto_follow_errors");
|
||||
static final ParseField LEADER_INDEX = new ParseField("leader_index");
|
||||
static final ParseField AUTO_FOLLOW_EXCEPTION = new ParseField("auto_follow_exception");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static final ConstructingObjectParser<AutoFollowStats, Void> STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats",
|
||||
args -> new AutoFollowStats(
|
||||
(Long) args[0],
|
||||
(Long) args[1],
|
||||
(Long) args[2],
|
||||
new TreeMap<>(
|
||||
((List<Map.Entry<String, ElasticsearchException>>) args[3])
|
||||
.stream()
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
|
||||
));
|
||||
|
||||
private static final ConstructingObjectParser<Map.Entry<String, ElasticsearchException>, Void> AUTO_FOLLOW_EXCEPTIONS_PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"auto_follow_stats_errors",
|
||||
args -> new AbstractMap.SimpleEntry<>((String) args[0], (ElasticsearchException) args[1]));
|
||||
|
||||
static {
|
||||
AUTO_FOLLOW_EXCEPTIONS_PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX);
|
||||
AUTO_FOLLOW_EXCEPTIONS_PARSER.declareObject(
|
||||
ConstructingObjectParser.constructorArg(),
|
||||
(p, c) -> ElasticsearchException.fromXContent(p),
|
||||
AUTO_FOLLOW_EXCEPTION);
|
||||
|
||||
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED);
|
||||
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS);
|
||||
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED);
|
||||
STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOW_EXCEPTIONS_PARSER,
|
||||
RECENT_AUTO_FOLLOW_ERRORS);
|
||||
}
|
||||
|
||||
private final long numberOfFailedFollowIndices;
|
||||
private final long numberOfFailedRemoteClusterStateRequests;
|
||||
private final long numberOfSuccessfulFollowIndices;
|
||||
private final NavigableMap<String, ElasticsearchException> recentAutoFollowErrors;
|
||||
|
||||
AutoFollowStats(long numberOfFailedFollowIndices,
|
||||
long numberOfFailedRemoteClusterStateRequests,
|
||||
long numberOfSuccessfulFollowIndices,
|
||||
NavigableMap<String, ElasticsearchException> recentAutoFollowErrors) {
|
||||
this.numberOfFailedFollowIndices = numberOfFailedFollowIndices;
|
||||
this.numberOfFailedRemoteClusterStateRequests = numberOfFailedRemoteClusterStateRequests;
|
||||
this.numberOfSuccessfulFollowIndices = numberOfSuccessfulFollowIndices;
|
||||
this.recentAutoFollowErrors = recentAutoFollowErrors;
|
||||
}
|
||||
|
||||
public long getNumberOfFailedFollowIndices() {
|
||||
return numberOfFailedFollowIndices;
|
||||
}
|
||||
|
||||
public long getNumberOfFailedRemoteClusterStateRequests() {
|
||||
return numberOfFailedRemoteClusterStateRequests;
|
||||
}
|
||||
|
||||
public long getNumberOfSuccessfulFollowIndices() {
|
||||
return numberOfSuccessfulFollowIndices;
|
||||
}
|
||||
|
||||
public NavigableMap<String, ElasticsearchException> getRecentAutoFollowErrors() {
|
||||
return recentAutoFollowErrors;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,25 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ccr;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
|
||||
public final class CcrStatsRequest implements Validatable {
|
||||
}
|
@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ccr;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
public final class CcrStatsResponse {
|
||||
|
||||
static final ParseField AUTO_FOLLOW_STATS_FIELD = new ParseField("auto_follow_stats");
|
||||
static final ParseField FOLLOW_STATS_FIELD = new ParseField("follow_stats");
|
||||
|
||||
private static final ConstructingObjectParser<CcrStatsResponse, Void> PARSER = new ConstructingObjectParser<>("indices",
|
||||
args -> {
|
||||
AutoFollowStats autoFollowStats = (AutoFollowStats) args[0];
|
||||
IndicesFollowStats indicesFollowStats = (IndicesFollowStats) args[1];
|
||||
return new CcrStatsResponse(autoFollowStats, indicesFollowStats);
|
||||
});
|
||||
|
||||
static {
|
||||
PARSER.declareObject(ConstructingObjectParser.constructorArg(), AutoFollowStats.STATS_PARSER, AUTO_FOLLOW_STATS_FIELD);
|
||||
PARSER.declareObject(ConstructingObjectParser.constructorArg(), IndicesFollowStats.PARSER, FOLLOW_STATS_FIELD);
|
||||
}
|
||||
|
||||
public static CcrStatsResponse fromXContent(XContentParser parser) {
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
private final AutoFollowStats autoFollowStats;
|
||||
private final IndicesFollowStats indicesFollowStats;
|
||||
|
||||
public CcrStatsResponse(AutoFollowStats autoFollowStats, IndicesFollowStats indicesFollowStats) {
|
||||
this.autoFollowStats = autoFollowStats;
|
||||
this.indicesFollowStats = indicesFollowStats;
|
||||
}
|
||||
|
||||
public AutoFollowStats getAutoFollowStats() {
|
||||
return autoFollowStats;
|
||||
}
|
||||
|
||||
public IndicesFollowStats getIndicesFollowStats() {
|
||||
return indicesFollowStats;
|
||||
}
|
||||
}
|
@ -0,0 +1,403 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ccr;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
|
||||
import java.util.AbstractMap;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NavigableMap;
|
||||
import java.util.TreeMap;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public final class IndicesFollowStats {
|
||||
|
||||
static final ParseField INDICES_FIELD = new ParseField("indices");
|
||||
static final ParseField INDEX_FIELD = new ParseField("index");
|
||||
static final ParseField SHARDS_FIELD = new ParseField("shards");
|
||||
|
||||
private static final ConstructingObjectParser<Tuple<String, List<ShardFollowStats>>, Void> ENTRY_PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"entry",
|
||||
args -> {
|
||||
String index = (String) args[0];
|
||||
@SuppressWarnings("unchecked")
|
||||
List<ShardFollowStats> shardFollowStats = (List<ShardFollowStats>) args[1];
|
||||
return new Tuple<>(index, shardFollowStats);
|
||||
}
|
||||
);
|
||||
|
||||
static {
|
||||
ENTRY_PARSER.declareString(ConstructingObjectParser.constructorArg(), INDEX_FIELD);
|
||||
ENTRY_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), ShardFollowStats.PARSER, SHARDS_FIELD);
|
||||
}
|
||||
|
||||
static final ConstructingObjectParser<IndicesFollowStats, Void> PARSER = new ConstructingObjectParser<>("indices",
|
||||
args -> {
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Tuple<String, List<ShardFollowStats>>> entries = (List<Tuple<String, List<ShardFollowStats>>>) args[0];
|
||||
Map<String, List<ShardFollowStats>> shardFollowStats = entries.stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2));
|
||||
return new IndicesFollowStats(new TreeMap<>(shardFollowStats));
|
||||
});
|
||||
|
||||
static {
|
||||
PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), ENTRY_PARSER, INDICES_FIELD);
|
||||
}
|
||||
|
||||
private final NavigableMap<String, List<ShardFollowStats>> shardFollowStats;
|
||||
|
||||
IndicesFollowStats(NavigableMap<String, List<ShardFollowStats>> shardFollowStats) {
|
||||
this.shardFollowStats = Collections.unmodifiableNavigableMap(shardFollowStats);
|
||||
}
|
||||
|
||||
public List<ShardFollowStats> getShardFollowStats(String index) {
|
||||
return shardFollowStats.get(index);
|
||||
}
|
||||
|
||||
public Map<String, List<ShardFollowStats>> getShardFollowStats() {
|
||||
return shardFollowStats;
|
||||
}
|
||||
|
||||
public static final class ShardFollowStats {
|
||||
|
||||
|
||||
static final ParseField LEADER_CLUSTER = new ParseField("remote_cluster");
|
||||
static final ParseField LEADER_INDEX = new ParseField("leader_index");
|
||||
static final ParseField FOLLOWER_INDEX = new ParseField("follower_index");
|
||||
static final ParseField SHARD_ID = new ParseField("shard_id");
|
||||
static final ParseField LEADER_GLOBAL_CHECKPOINT_FIELD = new ParseField("leader_global_checkpoint");
|
||||
static final ParseField LEADER_MAX_SEQ_NO_FIELD = new ParseField("leader_max_seq_no");
|
||||
static final ParseField FOLLOWER_GLOBAL_CHECKPOINT_FIELD = new ParseField("follower_global_checkpoint");
|
||||
static final ParseField FOLLOWER_MAX_SEQ_NO_FIELD = new ParseField("follower_max_seq_no");
|
||||
static final ParseField LAST_REQUESTED_SEQ_NO_FIELD = new ParseField("last_requested_seq_no");
|
||||
static final ParseField OUTSTANDING_READ_REQUESTS = new ParseField("outstanding_read_requests");
|
||||
static final ParseField OUTSTANDING_WRITE_REQUESTS = new ParseField("outstanding_write_requests");
|
||||
static final ParseField WRITE_BUFFER_OPERATION_COUNT_FIELD = new ParseField("write_buffer_operation_count");
|
||||
static final ParseField WRITE_BUFFER_SIZE_IN_BYTES_FIELD = new ParseField("write_buffer_size_in_bytes");
|
||||
static final ParseField FOLLOWER_MAPPING_VERSION_FIELD = new ParseField("follower_mapping_version");
|
||||
static final ParseField FOLLOWER_SETTINGS_VERSION_FIELD = new ParseField("follower_settings_version");
|
||||
static final ParseField TOTAL_READ_TIME_MILLIS_FIELD = new ParseField("total_read_time_millis");
|
||||
static final ParseField TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD = new ParseField("total_read_remote_exec_time_millis");
|
||||
static final ParseField SUCCESSFUL_READ_REQUESTS_FIELD = new ParseField("successful_read_requests");
|
||||
static final ParseField FAILED_READ_REQUESTS_FIELD = new ParseField("failed_read_requests");
|
||||
static final ParseField OPERATIONS_READ_FIELD = new ParseField("operations_read");
|
||||
static final ParseField BYTES_READ = new ParseField("bytes_read");
|
||||
static final ParseField TOTAL_WRITE_TIME_MILLIS_FIELD = new ParseField("total_write_time_millis");
|
||||
static final ParseField SUCCESSFUL_WRITE_REQUESTS_FIELD = new ParseField("successful_write_requests");
|
||||
static final ParseField FAILED_WRITE_REQUEST_FIELD = new ParseField("failed_write_requests");
|
||||
static final ParseField OPERATIONS_WRITTEN = new ParseField("operations_written");
|
||||
static final ParseField READ_EXCEPTIONS = new ParseField("read_exceptions");
|
||||
static final ParseField TIME_SINCE_LAST_READ_MILLIS_FIELD = new ParseField("time_since_last_read_millis");
|
||||
static final ParseField FATAL_EXCEPTION = new ParseField("fatal_exception");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static final ConstructingObjectParser<ShardFollowStats, Void> PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"shard-follow-stats",
|
||||
args -> new ShardFollowStats(
|
||||
(String) args[0],
|
||||
(String) args[1],
|
||||
(String) args[2],
|
||||
(int) args[3],
|
||||
(long) args[4],
|
||||
(long) args[5],
|
||||
(long) args[6],
|
||||
(long) args[7],
|
||||
(long) args[8],
|
||||
(int) args[9],
|
||||
(int) args[10],
|
||||
(int) args[11],
|
||||
(long) args[12],
|
||||
(long) args[13],
|
||||
(long) args[14],
|
||||
(long) args[15],
|
||||
(long) args[16],
|
||||
(long) args[17],
|
||||
(long) args[18],
|
||||
(long) args[19],
|
||||
(long) args[20],
|
||||
(long) args[21],
|
||||
(long) args[22],
|
||||
(long) args[23],
|
||||
(long) args[24],
|
||||
(long) args[25],
|
||||
new TreeMap<>(
|
||||
((List<Map.Entry<Long, Tuple<Integer, ElasticsearchException>>>) args[26])
|
||||
.stream()
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))),
|
||||
(ElasticsearchException) args[27]));
|
||||
|
||||
static final ConstructingObjectParser<Map.Entry<Long, Tuple<Integer, ElasticsearchException>>, Void> READ_EXCEPTIONS_ENTRY_PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"shard-follow-stats-read-exceptions-entry",
|
||||
args -> new AbstractMap.SimpleEntry<>((long) args[0], Tuple.tuple((Integer) args[1], (ElasticsearchException)args[2])));
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_CLUSTER);
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX);
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), FOLLOWER_INDEX);
|
||||
PARSER.declareInt(ConstructingObjectParser.constructorArg(), SHARD_ID);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), LEADER_GLOBAL_CHECKPOINT_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), LEADER_MAX_SEQ_NO_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_GLOBAL_CHECKPOINT_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_MAX_SEQ_NO_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_REQUESTED_SEQ_NO_FIELD);
|
||||
PARSER.declareInt(ConstructingObjectParser.constructorArg(), OUTSTANDING_READ_REQUESTS);
|
||||
PARSER.declareInt(ConstructingObjectParser.constructorArg(), OUTSTANDING_WRITE_REQUESTS);
|
||||
PARSER.declareInt(ConstructingObjectParser.constructorArg(), WRITE_BUFFER_OPERATION_COUNT_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), WRITE_BUFFER_SIZE_IN_BYTES_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_MAPPING_VERSION_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_SETTINGS_VERSION_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_TIME_MILLIS_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SUCCESSFUL_READ_REQUESTS_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FAILED_READ_REQUESTS_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), OPERATIONS_READ_FIELD);
|
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BYTES_READ);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_WRITE_TIME_MILLIS_FIELD);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SUCCESSFUL_WRITE_REQUESTS_FIELD);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FAILED_WRITE_REQUEST_FIELD);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), OPERATIONS_WRITTEN);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_READ_MILLIS_FIELD);
PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), READ_EXCEPTIONS_ENTRY_PARSER, READ_EXCEPTIONS);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> ElasticsearchException.fromXContent(p),
FATAL_EXCEPTION);
}

static final ParseField READ_EXCEPTIONS_ENTRY_FROM_SEQ_NO = new ParseField("from_seq_no");
static final ParseField READ_EXCEPTIONS_RETRIES = new ParseField("retries");
static final ParseField READ_EXCEPTIONS_ENTRY_EXCEPTION = new ParseField("exception");

static {
READ_EXCEPTIONS_ENTRY_PARSER.declareLong(ConstructingObjectParser.constructorArg(), READ_EXCEPTIONS_ENTRY_FROM_SEQ_NO);
READ_EXCEPTIONS_ENTRY_PARSER.declareInt(ConstructingObjectParser.constructorArg(), READ_EXCEPTIONS_RETRIES);
READ_EXCEPTIONS_ENTRY_PARSER.declareObject(
ConstructingObjectParser.constructorArg(),
(p, c) -> ElasticsearchException.fromXContent(p),
READ_EXCEPTIONS_ENTRY_EXCEPTION);
}

private final String remoteCluster;
private final String leaderIndex;
private final String followerIndex;
private final int shardId;
private final long leaderGlobalCheckpoint;
private final long leaderMaxSeqNo;
private final long followerGlobalCheckpoint;
private final long followerMaxSeqNo;
private final long lastRequestedSeqNo;
private final int outstandingReadRequests;
private final int outstandingWriteRequests;
private final int writeBufferOperationCount;
private final long writeBufferSizeInBytes;
private final long followerMappingVersion;
private final long followerSettingsVersion;
private final long totalReadTimeMillis;
private final long totalReadRemoteExecTimeMillis;
private final long successfulReadRequests;
private final long failedReadRequests;
private final long operationsReads;
private final long bytesRead;
private final long totalWriteTimeMillis;
private final long successfulWriteRequests;
private final long failedWriteRequests;
private final long operationWritten;
private final long timeSinceLastReadMillis;
private final NavigableMap<Long, Tuple<Integer, ElasticsearchException>> readExceptions;
private final ElasticsearchException fatalException;

ShardFollowStats(String remoteCluster,
String leaderIndex,
String followerIndex,
int shardId,
long leaderGlobalCheckpoint,
long leaderMaxSeqNo,
long followerGlobalCheckpoint,
long followerMaxSeqNo,
long lastRequestedSeqNo,
int outstandingReadRequests,
int outstandingWriteRequests,
int writeBufferOperationCount,
long writeBufferSizeInBytes,
long followerMappingVersion,
long followerSettingsVersion,
long totalReadTimeMillis,
long totalReadRemoteExecTimeMillis,
long successfulReadRequests,
long failedReadRequests,
long operationsReads,
long bytesRead,
long totalWriteTimeMillis,
long successfulWriteRequests,
long failedWriteRequests,
long operationWritten,
long timeSinceLastReadMillis,
NavigableMap<Long, Tuple<Integer, ElasticsearchException>> readExceptions,
ElasticsearchException fatalException) {
this.remoteCluster = remoteCluster;
this.leaderIndex = leaderIndex;
this.followerIndex = followerIndex;
this.shardId = shardId;
this.leaderGlobalCheckpoint = leaderGlobalCheckpoint;
this.leaderMaxSeqNo = leaderMaxSeqNo;
this.followerGlobalCheckpoint = followerGlobalCheckpoint;
this.followerMaxSeqNo = followerMaxSeqNo;
this.lastRequestedSeqNo = lastRequestedSeqNo;
this.outstandingReadRequests = outstandingReadRequests;
this.outstandingWriteRequests = outstandingWriteRequests;
this.writeBufferOperationCount = writeBufferOperationCount;
this.writeBufferSizeInBytes = writeBufferSizeInBytes;
this.followerMappingVersion = followerMappingVersion;
this.followerSettingsVersion = followerSettingsVersion;
this.totalReadTimeMillis = totalReadTimeMillis;
this.totalReadRemoteExecTimeMillis = totalReadRemoteExecTimeMillis;
this.successfulReadRequests = successfulReadRequests;
this.failedReadRequests = failedReadRequests;
this.operationsReads = operationsReads;
this.bytesRead = bytesRead;
this.totalWriteTimeMillis = totalWriteTimeMillis;
this.successfulWriteRequests = successfulWriteRequests;
this.failedWriteRequests = failedWriteRequests;
this.operationWritten = operationWritten;
this.timeSinceLastReadMillis = timeSinceLastReadMillis;
this.readExceptions = readExceptions;
this.fatalException = fatalException;
}

public String getRemoteCluster() {
return remoteCluster;
}

public String getLeaderIndex() {
return leaderIndex;
}

public String getFollowerIndex() {
return followerIndex;
}

public int getShardId() {
return shardId;
}

public long getLeaderGlobalCheckpoint() {
return leaderGlobalCheckpoint;
}

public long getLeaderMaxSeqNo() {
return leaderMaxSeqNo;
}

public long getFollowerGlobalCheckpoint() {
return followerGlobalCheckpoint;
}

public long getFollowerMaxSeqNo() {
return followerMaxSeqNo;
}

public long getLastRequestedSeqNo() {
return lastRequestedSeqNo;
}

public int getOutstandingReadRequests() {
return outstandingReadRequests;
}

public int getOutstandingWriteRequests() {
return outstandingWriteRequests;
}

public int getWriteBufferOperationCount() {
return writeBufferOperationCount;
}

public long getWriteBufferSizeInBytes() {
return writeBufferSizeInBytes;
}

public long getFollowerMappingVersion() {
return followerMappingVersion;
}

public long getFollowerSettingsVersion() {
return followerSettingsVersion;
}

public long getTotalReadTimeMillis() {
return totalReadTimeMillis;
}

public long getTotalReadRemoteExecTimeMillis() {
return totalReadRemoteExecTimeMillis;
}

public long getSuccessfulReadRequests() {
return successfulReadRequests;
}

public long getFailedReadRequests() {
return failedReadRequests;
}

public long getOperationsReads() {
return operationsReads;
}

public long getBytesRead() {
return bytesRead;
}

public long getTotalWriteTimeMillis() {
return totalWriteTimeMillis;
}

public long getSuccessfulWriteRequests() {
return successfulWriteRequests;
}

public long getFailedWriteRequests() {
return failedWriteRequests;
}

public long getOperationWritten() {
return operationWritten;
}

public long getTimeSinceLastReadMillis() {
return timeSinceLastReadMillis;
}

public NavigableMap<Long, Tuple<Integer, ElasticsearchException>> getReadExceptions() {
return readExceptions;
}

public ElasticsearchException getFatalException() {
return fatalException;
}
}

}
@ -0,0 +1,85 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client.common;

import org.elasticsearch.common.xcontent.ObjectPath;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentUtils;

import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
* Encapsulates the xcontent source
*/
public class XContentSource {

private final Object data;

/**
* Constructs a new XContentSource out of the given parser
*/
public XContentSource(XContentParser parser) throws IOException {
this.data = XContentUtils.readValue(parser, parser.nextToken());
}

/**
* @return true if the top level value of the source is a map
*/
public boolean isMap() {
return data instanceof Map;
}

/**
* @return The source as a map
*/
@SuppressWarnings("unchecked")
public Map<String, Object> getAsMap() {
return (Map<String, Object>) data;
}

/**
* @return true if the top level value of the source is a list
*/
public boolean isList() {
return data instanceof List;
}

/**
* @return The source as a list
*/
@SuppressWarnings("unchecked")
public List<Object> getAsList() {
return (List<Object>) data;
}

/**
* Extracts a value identified by the given path in the source.
*
* @param path a dot notation path to the requested value
* @return The extracted value or {@code null} if no value is associated with the given path
*/
@SuppressWarnings("unchecked")
public <T> T getValue(String path) {
return (T) ObjectPath.eval(path, data);
}

}
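A minimal usage sketch for the class above. This is not taken from the commit: the JSON body and the "hits.total" path are assumed purely for illustration, and the parser is built the same way the client code elsewhere in this change builds its parsers (EMPTY registry, no deprecation handler).

// Hypothetical example, not part of this commit.
import java.io.IOException;

import org.elasticsearch.client.common.XContentSource;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

public class XContentSourceExample {
    public static void main(String[] args) throws IOException {
        String json = "{\"hits\":{\"total\":10}}";
        try (XContentParser parser = XContentType.JSON.xContent()
                .createParser(NamedXContentRegistry.EMPTY, null, json)) {
            XContentSource source = new XContentSource(parser); // reads the next value from the parser
            Integer total = source.getValue("hits.total");      // dot-notation lookup, 10 for this body
            System.out.println(source.isMap() + " / " + total); // true / 10
        }
    }
}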
@ -0,0 +1,175 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client.watcher;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
* An execute watch request to execute a watch by id or inline
*/
public class ExecuteWatchRequest implements Validatable, ToXContentObject {

public enum ActionExecutionMode {
SIMULATE, FORCE_SIMULATE, EXECUTE, FORCE_EXECUTE, SKIP
}

private final String id;
private final BytesReference watchContent;

private boolean ignoreCondition = false;
private boolean recordExecution = false;
private boolean debug = false;

@Nullable
private BytesReference triggerData = null;

@Nullable
private BytesReference alternativeInput = null;

private Map<String, ActionExecutionMode> actionModes = new HashMap<>();

/**
* Execute an existing watch on the cluster
*
* @param id the id of the watch to execute
*/
public static ExecuteWatchRequest byId(String id) {
return new ExecuteWatchRequest(Objects.requireNonNull(id, "Watch id cannot be null"), null);
}

/**
* Execute an inline watch
* @param watchContent the JSON definition of the watch
*/
public static ExecuteWatchRequest inline(String watchContent) {
return new ExecuteWatchRequest(null, Objects.requireNonNull(watchContent, "Watch content cannot be null"));
}

private ExecuteWatchRequest(String id, String watchContent) {
this.id = id;
this.watchContent = watchContent == null ? null : new BytesArray(watchContent);
}

public String getId() {
return this.id;
}

/**
* @param ignoreCondition whether the condition for this execution should be ignored
*/
public void setIgnoreCondition(boolean ignoreCondition) {
this.ignoreCondition = ignoreCondition;
}

public boolean ignoreCondition() {
return ignoreCondition;
}

/**
* @param recordExecution whether this execution should be recorded in the history index
*/
public void setRecordExecution(boolean recordExecution) {
if (watchContent != null && recordExecution) {
throw new IllegalArgumentException("The execution of an inline watch cannot be recorded");
}
this.recordExecution = recordExecution;
}

public boolean recordExecution() {
return recordExecution;
}

/**
* @param alternativeInput Sets the alternative input
*/
public void setAlternativeInput(String alternativeInput) {
this.alternativeInput = new BytesArray(alternativeInput);
}

/**
* @param data A JSON string representing the data that should be associated with the trigger event.
*/
public void setTriggerData(String data) {
this.triggerData = new BytesArray(data);
}

/**
* Sets the action execution mode for the given action (identified by its id).
*
* @param actionId the action id.
* @param actionMode the execution mode of the action.
*/
public void setActionMode(String actionId, ActionExecutionMode actionMode) {
Objects.requireNonNull(actionId, "actionId cannot be null");
actionModes.put(actionId, actionMode);
}

public Map<String, ActionExecutionMode> getActionModes() {
return this.actionModes;
}

/**
* @param debug indicates whether the watch should execute in debug mode. In debug mode the
* returned watch record will hold the execution {@code vars}
*/
public void setDebug(boolean debug) {
this.debug = debug;
}

public boolean isDebug() {
return debug;
}

@Override
public String toString() {
return "execute[" + id + "]";
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (triggerData != null) {
builder.rawField("trigger_data", triggerData.streamInput(), XContentType.JSON);
}
if (alternativeInput != null) {
builder.rawField("alternative_input", alternativeInput.streamInput(), XContentType.JSON);
}
if (actionModes.size() > 0) {
builder.field("action_modes", actionModes);
}
if (watchContent != null) {
builder.rawField("watch", watchContent.streamInput(), XContentType.JSON);
}
builder.endObject();
return builder;
}
}
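A brief request-building sketch for the class above, using only the methods this change introduces; the watch id, action id, and inline watch body are illustrative values, not taken from the commit.

// Hypothetical example, not part of this commit.
import org.elasticsearch.client.watcher.ExecuteWatchRequest;

public class ExecuteWatchRequestExample {
    public static void main(String[] args) {
        // Execute a stored watch by id and record the result in the watch history.
        ExecuteWatchRequest byId = ExecuteWatchRequest.byId("my_watch");
        byId.setIgnoreCondition(true);  // run the actions even if the condition evaluates to false
        byId.setRecordExecution(true);  // allowed for stored watches only
        byId.setActionMode("logme", ExecuteWatchRequest.ActionExecutionMode.SIMULATE);

        // Execute a watch definition inline; recording is rejected for inline watches.
        ExecuteWatchRequest inline = ExecuteWatchRequest.inline(
            "{ \"trigger\": { \"schedule\": { \"interval\": \"10h\" } }, \"input\": { \"none\": {} }, \"actions\": {} }");
        try {
            inline.setRecordExecution(true);
        } catch (IllegalArgumentException e) {
            // "The execution of an inline watch cannot be recorded", as enforced in the class above
        }
    }
}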
@ -0,0 +1,107 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client.watcher;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.XContentUtils;

import java.io.IOException;
import java.io.InputStream;
import java.util.Map;

public class ExecuteWatchResponse {

public static final ParseField ID_FIELD = new ParseField("_id");
public static final ParseField WATCH_FIELD = new ParseField("watch_record");

private String recordId;
private BytesReference contentSource;

private Map<String, Object> data;

public ExecuteWatchResponse() {
}

public ExecuteWatchResponse(String recordId, BytesReference contentSource) {
this.recordId = recordId;
this.contentSource = contentSource;
}

/**
* @return The id of the watch record holding the watch execution result.
*/
public String getRecordId() {
return recordId;
}

/**
* @return The watch record source
*/
public BytesReference getRecord() {
return contentSource;
}

/**
* Returns the watch record as a map
*
* Use {@link org.elasticsearch.common.xcontent.ObjectPath} to navigate through the data
*/
@SuppressWarnings("unchecked")
public Map<String, Object> getRecordAsMap() {
if (data == null) {
// EMPTY is safe here because we never use namedObject
try (InputStream stream = contentSource.streamInput();
XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, null, stream)) {
data = (Map<String, Object>) XContentUtils.readValue(parser, parser.nextToken());
} catch (IOException ex) {
throw new ElasticsearchException("failed to read value", ex);
}
}
return data;
}

private static final ConstructingObjectParser<ExecuteWatchResponse, Void> PARSER
= new ConstructingObjectParser<>("x_pack_execute_watch_response", false,
(fields) -> new ExecuteWatchResponse((String)fields[0], (BytesReference) fields[1]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), ID_FIELD);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> readBytesReference(p), WATCH_FIELD);
}

public static ExecuteWatchResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}

private static BytesReference readBytesReference(XContentParser parser) throws IOException {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.copyCurrentStructure(parser);
return BytesReference.bytes(builder);
}
}

}
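A short sketch of how the parsed response above might be consumed. The "trigger_event.type" path mirrors what the integration tests later in this change assert; the wrapping class and method name are assumed for illustration.

// Hypothetical example, not part of this commit.
import java.util.Map;

import org.elasticsearch.client.watcher.ExecuteWatchResponse;
import org.elasticsearch.common.xcontent.ObjectPath;

public class ExecuteWatchResponseExample {
    static String triggerType(ExecuteWatchResponse response) {
        Map<String, Object> record = response.getRecordAsMap(); // lazily parsed from the raw watch record
        return ObjectPath.eval("trigger_event.type", record);   // e.g. "manual" for an explicit execution
    }
}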
@ -29,9 +29,12 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ccr.CcrStatsRequest;
import org.elasticsearch.client.ccr.CcrStatsResponse;
import org.elasticsearch.client.ccr.DeleteAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.GetAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.GetAutoFollowPatternResponse;
import org.elasticsearch.client.ccr.IndicesFollowStats.ShardFollowStats;
import org.elasticsearch.client.ccr.PauseFollowRequest;
import org.elasticsearch.client.ccr.PutAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.PutFollowRequest;
@ -39,7 +42,6 @@ import org.elasticsearch.client.ccr.PutFollowResponse;
import org.elasticsearch.client.ccr.ResumeFollowRequest;
import org.elasticsearch.client.ccr.UnfollowRequest;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.common.xcontent.ObjectPath;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@ -47,6 +49,7 @@ import org.junit.Before;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
@ -104,6 +107,15 @@ public class CCRIT extends ESRestHighLevelClientTestCase {
assertThat(leaderSearchResponse.getHits().getTotalHits(), equalTo(1L));

assertBusy(() -> {
CcrStatsRequest ccrStatsRequest = new CcrStatsRequest();
CcrStatsResponse ccrStatsResponse = execute(ccrStatsRequest, ccrClient::getCcrStats, ccrClient::getCcrStatsAsync);
List<ShardFollowStats> shardFollowStats = ccrStatsResponse.getIndicesFollowStats().getShardFollowStats("follower");
long followerGlobalCheckpoint = shardFollowStats.stream()
.mapToLong(ShardFollowStats::getFollowerGlobalCheckpoint)
.max()
.getAsLong();
assertThat(followerGlobalCheckpoint, equalTo(0L));

SearchRequest followerSearchRequest = new SearchRequest("follower");
SearchResponse followerSearchResponse = highLevelClient().search(followerSearchRequest, RequestOptions.DEFAULT);
assertThat(followerSearchResponse.getHits().getTotalHits(), equalTo(1L));
@ -120,6 +132,15 @@ public class CCRIT extends ESRestHighLevelClientTestCase {
assertThat(resumeFollowResponse.isAcknowledged(), is(true));

assertBusy(() -> {
CcrStatsRequest ccrStatsRequest = new CcrStatsRequest();
CcrStatsResponse ccrStatsResponse = execute(ccrStatsRequest, ccrClient::getCcrStats, ccrClient::getCcrStatsAsync);
List<ShardFollowStats> shardFollowStats = ccrStatsResponse.getIndicesFollowStats().getShardFollowStats("follower");
long followerGlobalCheckpoint = shardFollowStats.stream()
.mapToLong(ShardFollowStats::getFollowerGlobalCheckpoint)
.max()
.getAsLong();
assertThat(followerGlobalCheckpoint, equalTo(1L));

SearchRequest followerSearchRequest = new SearchRequest("follower");
SearchResponse followerSearchResponse = highLevelClient().search(followerSearchRequest, RequestOptions.DEFAULT);
assertThat(followerSearchResponse.getHits().getTotalHits(), equalTo(2L));
@ -156,15 +177,12 @@ public class CCRIT extends ESRestHighLevelClientTestCase {
assertThat(response.isAcknowledged(), is(true));

assertBusy(() -> {
assertThat(indexExists("copy-logs-20200101"), is(true));
// TODO: replace with HLRC follow stats when available:
Map<String, Object> rsp = toMap(client().performRequest(new Request("GET", "/copy-logs-20200101/_ccr/stats")));
String index = null;
try {
index = ObjectPath.eval("indices.0.index", rsp);
} catch (Exception e){ }
assertThat(index, equalTo("copy-logs-20200101"));
CcrStatsRequest ccrStatsRequest = new CcrStatsRequest();
CcrStatsResponse ccrStatsResponse = execute(ccrStatsRequest, ccrClient::getCcrStats, ccrClient::getCcrStatsAsync);
assertThat(ccrStatsResponse.getAutoFollowStats().getNumberOfSuccessfulFollowIndices(), equalTo(1L));
assertThat(ccrStatsResponse.getIndicesFollowStats().getShardFollowStats("copy-logs-20200101"), notNullValue());
});
assertThat(indexExists("copy-logs-20200101"), is(true));

GetAutoFollowPatternRequest getAutoFollowPatternRequest =
randomBoolean() ? new GetAutoFollowPatternRequest("pattern1") : new GetAutoFollowPatternRequest();
@ -19,10 +19,6 @@
package org.elasticsearch.client;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.client.watcher.DeactivateWatchRequest;
import org.elasticsearch.client.watcher.DeactivateWatchResponse;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.client.watcher.AckWatchResponse;
@ -30,6 +26,14 @@ import org.elasticsearch.client.watcher.ActionStatus;
import org.elasticsearch.client.watcher.ActionStatus.AckStatus;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchResponse;
import org.elasticsearch.client.watcher.DeactivateWatchRequest;
import org.elasticsearch.client.watcher.DeactivateWatchResponse;
import org.elasticsearch.client.watcher.DeleteWatchRequest;
import org.elasticsearch.client.watcher.DeleteWatchResponse;
import org.elasticsearch.client.watcher.ExecuteWatchRequest;
import org.elasticsearch.client.watcher.ExecuteWatchResponse;
import org.elasticsearch.client.watcher.PutWatchRequest;
import org.elasticsearch.client.watcher.PutWatchResponse;
import org.elasticsearch.client.watcher.StartWatchServiceRequest;
import org.elasticsearch.client.watcher.StopWatchServiceRequest;
import org.elasticsearch.client.watcher.WatcherState;
@ -37,13 +41,13 @@ import org.elasticsearch.client.watcher.WatcherStatsRequest;
import org.elasticsearch.client.watcher.WatcherStatsResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ObjectPath;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.client.watcher.DeleteWatchRequest;
import org.elasticsearch.client.watcher.DeleteWatchResponse;
import org.elasticsearch.client.watcher.PutWatchRequest;
import org.elasticsearch.client.watcher.PutWatchResponse;
import org.elasticsearch.rest.RestStatus;

import java.util.Map;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
@ -81,13 +85,14 @@ public class WatcherIT extends ESRestHighLevelClientTestCase {
assertThat(putWatchResponse.getVersion(), is(1L));
}

private static final String WATCH_JSON = "{ \n" +
" \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
" \"input\": { \"none\": {} },\n" +
" \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
"}";

private PutWatchResponse createWatch(String watchId) throws Exception {
String json = "{ \n" +
" \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
" \"input\": { \"none\": {} },\n" +
" \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
"}";
BytesReference bytesReference = new BytesArray(json);
BytesReference bytesReference = new BytesArray(WATCH_JSON);
PutWatchRequest putWatchRequest = new PutWatchRequest(watchId, bytesReference, XContentType.JSON);
return highLevelClient().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT);
}
@ -185,6 +190,37 @@ public class WatcherIT extends ESRestHighLevelClientTestCase {
assertEquals(RestStatus.NOT_FOUND, exception.status());
}

public void testExecuteWatchById() throws Exception {
String watchId = randomAlphaOfLength(10);
createWatch(watchId);

ExecuteWatchResponse response = highLevelClient().watcher()
.executeWatch(ExecuteWatchRequest.byId(watchId), RequestOptions.DEFAULT);
assertThat(response.getRecordId(), containsString(watchId));

Map<String, Object> source = response.getRecordAsMap();
assertThat(ObjectPath.eval("trigger_event.type", source), is("manual"));

}

public void testExecuteWatchThatDoesNotExist() throws Exception {
String watchId = randomAlphaOfLength(10);
// expect an exception when executing a watch that does not exist
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () ->
highLevelClient().watcher().executeWatch(ExecuteWatchRequest.byId(watchId), RequestOptions.DEFAULT));
assertEquals(RestStatus.NOT_FOUND, exception.status());
}

public void testExecuteInlineWatch() throws Exception {
ExecuteWatchResponse response = highLevelClient().watcher()
.executeWatch(ExecuteWatchRequest.inline(WATCH_JSON), RequestOptions.DEFAULT);
assertThat(response.getRecordId(), containsString("_inlined_"));

Map<String, Object> source = response.getRecordAsMap();
assertThat(ObjectPath.eval("trigger_event.type", source), is("manual"));
}

public void testWatcherStatsMetrics() throws Exception {
boolean includeCurrent = randomBoolean();
boolean includeQueued = randomBoolean();
@ -19,6 +19,7 @@

package org.elasticsearch.client;

import org.apache.http.HttpEntity;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
@ -27,6 +28,7 @@ import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.client.watcher.ActivateWatchRequest;
import org.elasticsearch.client.watcher.DeactivateWatchRequest;
import org.elasticsearch.client.watcher.DeleteWatchRequest;
import org.elasticsearch.client.watcher.ExecuteWatchRequest;
import org.elasticsearch.client.watcher.PutWatchRequest;
import org.elasticsearch.client.watcher.GetWatchRequest;
import org.elasticsearch.client.watcher.StartWatchServiceRequest;
@ -38,12 +40,15 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.StringJoiner;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
@ -53,6 +58,12 @@ import static org.hamcrest.Matchers.nullValue;

public class WatcherRequestConvertersTests extends ESTestCase {

private static String toString(HttpEntity entity) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
entity.writeTo(baos);
return baos.toString(StandardCharsets.UTF_8.name());
}

public void testStartWatchService() {
Request request = WatcherRequestConverters.startWatchService(new StartWatchServiceRequest());
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
@ -177,4 +188,138 @@ public class WatcherRequestConvertersTests extends ESTestCase {
}
assertThat(request.getEntity(), nullValue());
}

public void testExecuteWatchByIdRequest() throws IOException {

boolean ignoreCondition = randomBoolean();
boolean recordExecution = randomBoolean();
boolean debug = randomBoolean();

ExecuteWatchRequest request = ExecuteWatchRequest.byId("my_id");
request.setIgnoreCondition(ignoreCondition);
request.setRecordExecution(recordExecution);
request.setDebug(debug);

boolean setActionMode = randomBoolean();
if (setActionMode) {
request.setActionMode("action1", ExecuteWatchRequest.ActionExecutionMode.SIMULATE);
}

boolean useTriggerData = randomBoolean();
String triggerData = "{ \"entry1\" : \"blah\", \"entry2\" : \"blah\" }";
if (useTriggerData) {
request.setTriggerData(triggerData);
}

boolean useAlternativeInput = randomBoolean();
String alternativeInput = "{ \"foo\" : \"bar\" }";
if (useAlternativeInput) {
request.setAlternativeInput(alternativeInput);
}

Request req = WatcherRequestConverters.executeWatch(request);
assertThat(req.getEndpoint(), equalTo("/_xpack/watcher/watch/my_id/_execute"));
assertThat(req.getMethod(), equalTo(HttpPost.METHOD_NAME));

if (ignoreCondition) {
assertThat(req.getParameters(), hasKey("ignore_condition"));
assertThat(req.getParameters().get("ignore_condition"), is("true"));
}

if (recordExecution) {
assertThat(req.getParameters(), hasKey("record_execution"));
assertThat(req.getParameters().get("record_execution"), is("true"));
}

if (debug) {
assertThat(req.getParameters(), hasKey("debug"));
assertThat(req.getParameters().get("debug"), is("true"));
}

String body = toString(req.getEntity());
if (setActionMode) {
assertThat(body, containsString("\"action_modes\":{\"action1\":\"SIMULATE\"}"));
}
else {
assertThat(body, not(containsString("action_modes")));
}
if (useTriggerData) {
assertThat(body, containsString("\"trigger_data\":" + triggerData));
}
else {
assertThat(body, not(containsString("trigger_data")));
}
if (useAlternativeInput) {
assertThat(body, containsString("\"alternative_input\":" + alternativeInput));
}
else {
assertThat(body, not(containsString("alternative_input")));
}
assertThat(body, not(containsString("\"watch\":")));

}

private static final String WATCH_JSON = "{ \n" +
" \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
" \"input\": { \"none\": {} },\n" +
" \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
"}";

public void testExecuteInlineWatchRequest() throws IOException {
boolean ignoreCondition = randomBoolean();

ExecuteWatchRequest request = ExecuteWatchRequest.inline(WATCH_JSON);
request.setIgnoreCondition(ignoreCondition);

expectThrows(IllegalArgumentException.class, () -> {
request.setRecordExecution(true);
});

boolean setActionMode = randomBoolean();
if (setActionMode) {
request.setActionMode("action1", ExecuteWatchRequest.ActionExecutionMode.SIMULATE);
}

boolean useTriggerData = randomBoolean();
String triggerData = "{ \"entry1\" : \"blah\", \"entry2\" : \"blah\" }";
if (useTriggerData) {
request.setTriggerData(triggerData);
}

boolean useAlternativeInput = randomBoolean();
String alternativeInput = "{ \"foo\" : \"bar\" }";
if (useAlternativeInput) {
request.setAlternativeInput(alternativeInput);
}

Request req = WatcherRequestConverters.executeWatch(request);
assertThat(req.getEndpoint(), equalTo("/_xpack/watcher/watch/_execute"));
assertThat(req.getMethod(), equalTo(HttpPost.METHOD_NAME));

if (ignoreCondition) {
assertThat(req.getParameters(), hasKey("ignore_condition"));
assertThat(req.getParameters().get("ignore_condition"), is("true"));
}

String body = toString(req.getEntity());
if (setActionMode) {
assertThat(body, containsString("\"action_modes\":{\"action1\":\"SIMULATE\"}"));
}
else {
assertThat(body, not(containsString("action_modes")));
}
if (useTriggerData) {
assertThat(body, containsString("\"trigger_data\":" + triggerData));
}
else {
assertThat(body, not(containsString("trigger_data")));
}
if (useAlternativeInput) {
assertThat(body, containsString("\"alternative_input\":" + alternativeInput));
}
else {
assertThat(body, not(containsString("alternative_input")));
}
assertThat(body, containsString("\"watch\":" + WATCH_JSON));
}
}
@ -0,0 +1,376 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client.ccr;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.client.ccr.IndicesFollowStats.ShardFollowStats;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

public class CcrStatsResponseTests extends ESTestCase {

public void testFromXContent() throws IOException {
xContentTester(this::createParser,
CcrStatsResponseTests::createTestInstance,
CcrStatsResponseTests::toXContent,
CcrStatsResponse::fromXContent)
.supportsUnknownFields(false)
.assertEqualsConsumer(CcrStatsResponseTests::assertEqualInstances)
.assertToXContentEquivalence(false)
.test();
}

// Needed, because exceptions in IndicesFollowStats and AutoFollowStats cannot be compared
private static void assertEqualInstances(CcrStatsResponse expectedInstance, CcrStatsResponse newInstance) {
assertNotSame(expectedInstance, newInstance);

{
AutoFollowStats newAutoFollowStats = newInstance.getAutoFollowStats();
AutoFollowStats expectedAutoFollowStats = expectedInstance.getAutoFollowStats();
assertThat(newAutoFollowStats.getNumberOfSuccessfulFollowIndices(),
equalTo(expectedAutoFollowStats.getNumberOfSuccessfulFollowIndices()));
assertThat(newAutoFollowStats.getNumberOfFailedRemoteClusterStateRequests(),
equalTo(expectedAutoFollowStats.getNumberOfFailedRemoteClusterStateRequests()));
assertThat(newAutoFollowStats.getNumberOfFailedFollowIndices(),
equalTo(expectedAutoFollowStats.getNumberOfFailedFollowIndices()));
assertThat(newAutoFollowStats.getRecentAutoFollowErrors().size(),
equalTo(expectedAutoFollowStats.getRecentAutoFollowErrors().size()));
assertThat(newAutoFollowStats.getRecentAutoFollowErrors().keySet(),
equalTo(expectedAutoFollowStats.getRecentAutoFollowErrors().keySet()));
for (final Map.Entry<String, ElasticsearchException> entry : newAutoFollowStats.getRecentAutoFollowErrors().entrySet()) {
// x-content loses the exception
final ElasticsearchException expected = expectedAutoFollowStats.getRecentAutoFollowErrors().get(entry.getKey());
assertThat(entry.getValue().getMessage(), containsString(expected.getMessage()));
assertNotNull(entry.getValue().getCause());
assertThat(
entry.getValue().getCause(),
anyOf(instanceOf(ElasticsearchException.class), instanceOf(IllegalStateException.class)));
assertThat(entry.getValue().getCause().getMessage(), containsString(expected.getCause().getMessage()));
}
}
{
IndicesFollowStats newIndicesFollowStats = newInstance.getIndicesFollowStats();
IndicesFollowStats expectedIndicesFollowStats = expectedInstance.getIndicesFollowStats();
assertThat(newIndicesFollowStats.getShardFollowStats().size(),
equalTo(expectedIndicesFollowStats.getShardFollowStats().size()));
assertThat(newIndicesFollowStats.getShardFollowStats().keySet(),
equalTo(expectedIndicesFollowStats.getShardFollowStats().keySet()));
for (Map.Entry<String, List<ShardFollowStats>> indexEntry : newIndicesFollowStats.getShardFollowStats().entrySet()) {
List<ShardFollowStats> newStats = indexEntry.getValue();
List<ShardFollowStats> expectedStats = expectedIndicesFollowStats.getShardFollowStats(indexEntry.getKey());
assertThat(newStats.size(), equalTo(expectedStats.size()));
for (int i = 0; i < newStats.size(); i++) {
ShardFollowStats actualShardFollowStats = newStats.get(i);
ShardFollowStats expectedShardFollowStats = expectedStats.get(i);

assertThat(actualShardFollowStats.getRemoteCluster(), equalTo(expectedShardFollowStats.getRemoteCluster()));
assertThat(actualShardFollowStats.getLeaderIndex(), equalTo(expectedShardFollowStats.getLeaderIndex()));
assertThat(actualShardFollowStats.getFollowerIndex(), equalTo(expectedShardFollowStats.getFollowerIndex()));
assertThat(actualShardFollowStats.getShardId(), equalTo(expectedShardFollowStats.getShardId()));
assertThat(actualShardFollowStats.getLeaderGlobalCheckpoint(),
equalTo(expectedShardFollowStats.getLeaderGlobalCheckpoint()));
assertThat(actualShardFollowStats.getLeaderMaxSeqNo(), equalTo(expectedShardFollowStats.getLeaderMaxSeqNo()));
assertThat(actualShardFollowStats.getFollowerGlobalCheckpoint(),
equalTo(expectedShardFollowStats.getFollowerGlobalCheckpoint()));
assertThat(actualShardFollowStats.getLastRequestedSeqNo(), equalTo(expectedShardFollowStats.getLastRequestedSeqNo()));
assertThat(actualShardFollowStats.getOutstandingReadRequests(),
equalTo(expectedShardFollowStats.getOutstandingReadRequests()));
assertThat(actualShardFollowStats.getOutstandingWriteRequests(),
equalTo(expectedShardFollowStats.getOutstandingWriteRequests()));
assertThat(actualShardFollowStats.getWriteBufferOperationCount(),
equalTo(expectedShardFollowStats.getWriteBufferOperationCount()));
assertThat(actualShardFollowStats.getFollowerMappingVersion(),
equalTo(expectedShardFollowStats.getFollowerMappingVersion()));
assertThat(actualShardFollowStats.getFollowerSettingsVersion(),
equalTo(expectedShardFollowStats.getFollowerSettingsVersion()));
assertThat(actualShardFollowStats.getTotalReadTimeMillis(),
equalTo(expectedShardFollowStats.getTotalReadTimeMillis()));
assertThat(actualShardFollowStats.getSuccessfulReadRequests(),
equalTo(expectedShardFollowStats.getSuccessfulReadRequests()));
assertThat(actualShardFollowStats.getFailedReadRequests(), equalTo(expectedShardFollowStats.getFailedReadRequests()));
assertThat(actualShardFollowStats.getOperationsReads(), equalTo(expectedShardFollowStats.getOperationsReads()));
assertThat(actualShardFollowStats.getBytesRead(), equalTo(expectedShardFollowStats.getBytesRead()));
assertThat(actualShardFollowStats.getTotalWriteTimeMillis(),
equalTo(expectedShardFollowStats.getTotalWriteTimeMillis()));
assertThat(actualShardFollowStats.getSuccessfulWriteRequests(),
equalTo(expectedShardFollowStats.getSuccessfulWriteRequests()));
assertThat(actualShardFollowStats.getFailedWriteRequests(),
equalTo(expectedShardFollowStats.getFailedWriteRequests()));
assertThat(actualShardFollowStats.getOperationWritten(), equalTo(expectedShardFollowStats.getOperationWritten()));
assertThat(actualShardFollowStats.getReadExceptions().size(),
equalTo(expectedShardFollowStats.getReadExceptions().size()));
assertThat(actualShardFollowStats.getReadExceptions().keySet(),
equalTo(expectedShardFollowStats.getReadExceptions().keySet()));
for (final Map.Entry<Long, Tuple<Integer, ElasticsearchException>> entry :
actualShardFollowStats.getReadExceptions().entrySet()) {
final Tuple<Integer, ElasticsearchException> expectedTuple =
expectedShardFollowStats.getReadExceptions().get(entry.getKey());
assertThat(entry.getValue().v1(), equalTo(expectedTuple.v1()));
// x-content loses the exception
final ElasticsearchException expected = expectedTuple.v2();
assertThat(entry.getValue().v2().getMessage(), containsString(expected.getMessage()));
assertNotNull(entry.getValue().v2().getCause());
assertThat(
entry.getValue().v2().getCause(),
anyOf(instanceOf(ElasticsearchException.class), instanceOf(IllegalStateException.class)));
assertThat(entry.getValue().v2().getCause().getMessage(), containsString(expected.getCause().getMessage()));
}
assertThat(actualShardFollowStats.getTimeSinceLastReadMillis(),
equalTo(expectedShardFollowStats.getTimeSinceLastReadMillis()));
}
}
}
}

private static void toXContent(CcrStatsResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
{
AutoFollowStats autoFollowStats = response.getAutoFollowStats();
builder.startObject(CcrStatsResponse.AUTO_FOLLOW_STATS_FIELD.getPreferredName());
{
builder.field(AutoFollowStats.NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED.getPreferredName(),
autoFollowStats.getNumberOfSuccessfulFollowIndices());
builder.field(AutoFollowStats.NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS.getPreferredName(),
autoFollowStats.getNumberOfFailedRemoteClusterStateRequests());
builder.field(AutoFollowStats.NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED.getPreferredName(),
autoFollowStats.getNumberOfFailedFollowIndices());
builder.startArray(AutoFollowStats.RECENT_AUTO_FOLLOW_ERRORS.getPreferredName());
for (Map.Entry<String, ElasticsearchException> entry : autoFollowStats.getRecentAutoFollowErrors().entrySet()) {
builder.startObject();
{
builder.field(AutoFollowStats.LEADER_INDEX.getPreferredName(), entry.getKey());
builder.field(AutoFollowStats.AUTO_FOLLOW_EXCEPTION.getPreferredName());
builder.startObject();
{
ElasticsearchException.generateThrowableXContent(builder, ToXContent.EMPTY_PARAMS, entry.getValue());
}
builder.endObject();
}
builder.endObject();
}
builder.endArray();
}
builder.endObject();

IndicesFollowStats indicesFollowStats = response.getIndicesFollowStats();
builder.startObject(CcrStatsResponse.FOLLOW_STATS_FIELD.getPreferredName());
{
builder.startArray(IndicesFollowStats.INDICES_FIELD.getPreferredName());
for (Map.Entry<String, List<ShardFollowStats>> indexEntry :
indicesFollowStats.getShardFollowStats().entrySet()) {
builder.startObject();
{
builder.field(IndicesFollowStats.INDEX_FIELD.getPreferredName(), indexEntry.getKey());
builder.startArray(IndicesFollowStats.SHARDS_FIELD.getPreferredName());
{
for (ShardFollowStats stats : indexEntry.getValue()) {
builder.startObject();
{
builder.field(ShardFollowStats.LEADER_CLUSTER.getPreferredName(), stats.getRemoteCluster());
builder.field(ShardFollowStats.LEADER_INDEX.getPreferredName(), stats.getLeaderIndex());
builder.field(ShardFollowStats.FOLLOWER_INDEX.getPreferredName(), stats.getFollowerIndex());
builder.field(ShardFollowStats.SHARD_ID.getPreferredName(), stats.getShardId());
builder.field(ShardFollowStats.LEADER_GLOBAL_CHECKPOINT_FIELD.getPreferredName(),
stats.getLeaderGlobalCheckpoint());
builder.field(ShardFollowStats.LEADER_MAX_SEQ_NO_FIELD.getPreferredName(), stats.getLeaderMaxSeqNo());
builder.field(ShardFollowStats.FOLLOWER_GLOBAL_CHECKPOINT_FIELD.getPreferredName(),
stats.getFollowerGlobalCheckpoint());
builder.field(ShardFollowStats.FOLLOWER_MAX_SEQ_NO_FIELD.getPreferredName(),
stats.getFollowerMaxSeqNo());
builder.field(ShardFollowStats.LAST_REQUESTED_SEQ_NO_FIELD.getPreferredName(),
stats.getLastRequestedSeqNo());
builder.field(ShardFollowStats.OUTSTANDING_READ_REQUESTS.getPreferredName(),
stats.getOutstandingReadRequests());
builder.field(ShardFollowStats.OUTSTANDING_WRITE_REQUESTS.getPreferredName(),
stats.getOutstandingWriteRequests());
builder.field(ShardFollowStats.WRITE_BUFFER_OPERATION_COUNT_FIELD.getPreferredName(),
stats.getWriteBufferOperationCount());
builder.humanReadableField(
ShardFollowStats.WRITE_BUFFER_SIZE_IN_BYTES_FIELD.getPreferredName(),
"write_buffer_size",
new ByteSizeValue(stats.getWriteBufferSizeInBytes()));
builder.field(ShardFollowStats.FOLLOWER_MAPPING_VERSION_FIELD.getPreferredName(),
stats.getFollowerMappingVersion());
builder.field(ShardFollowStats.FOLLOWER_SETTINGS_VERSION_FIELD.getPreferredName(),
stats.getFollowerSettingsVersion());
builder.humanReadableField(
ShardFollowStats.TOTAL_READ_TIME_MILLIS_FIELD.getPreferredName(),
"total_read_time",
new TimeValue(stats.getTotalReadTimeMillis(), TimeUnit.MILLISECONDS));
builder.humanReadableField(
ShardFollowStats.TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD.getPreferredName(),
"total_read_remote_exec_time",
new TimeValue(stats.getTotalReadRemoteExecTimeMillis(), TimeUnit.MILLISECONDS));
builder.field(ShardFollowStats.SUCCESSFUL_READ_REQUESTS_FIELD.getPreferredName(),
stats.getSuccessfulReadRequests());
builder.field(ShardFollowStats.FAILED_READ_REQUESTS_FIELD.getPreferredName(),
stats.getFailedReadRequests());
builder.field(ShardFollowStats.OPERATIONS_READ_FIELD.getPreferredName(), stats.getOperationsReads());
builder.humanReadableField(
ShardFollowStats.BYTES_READ.getPreferredName(),
"total_read",
new ByteSizeValue(stats.getBytesRead(), ByteSizeUnit.BYTES));
builder.humanReadableField(
ShardFollowStats.TOTAL_WRITE_TIME_MILLIS_FIELD.getPreferredName(),
"total_write_time",
new TimeValue(stats.getTotalWriteTimeMillis(), TimeUnit.MILLISECONDS));
builder.field(ShardFollowStats.SUCCESSFUL_WRITE_REQUESTS_FIELD.getPreferredName(),
stats.getSuccessfulWriteRequests());
builder.field(ShardFollowStats.FAILED_WRITE_REQUEST_FIELD.getPreferredName(),
stats.getFailedWriteRequests());
builder.field(ShardFollowStats.OPERATIONS_WRITTEN.getPreferredName(), stats.getOperationWritten());
builder.startArray(ShardFollowStats.READ_EXCEPTIONS.getPreferredName());
{
for (final Map.Entry<Long, Tuple<Integer, ElasticsearchException>> entry :
stats.getReadExceptions().entrySet()) {
builder.startObject();
{
builder.field(ShardFollowStats.READ_EXCEPTIONS_ENTRY_FROM_SEQ_NO.getPreferredName(),
entry.getKey());
builder.field(ShardFollowStats.READ_EXCEPTIONS_RETRIES.getPreferredName(),
entry.getValue().v1());
builder.field(ShardFollowStats.READ_EXCEPTIONS_ENTRY_EXCEPTION.getPreferredName());
builder.startObject();
{
ElasticsearchException.generateThrowableXContent(builder, ToXContent.EMPTY_PARAMS,
entry.getValue().v2());
}
builder.endObject();
}
builder.endObject();
}
}
builder.endArray();
builder.humanReadableField(
ShardFollowStats.TIME_SINCE_LAST_READ_MILLIS_FIELD.getPreferredName(),
"time_since_last_read",
new TimeValue(stats.getTimeSinceLastReadMillis(), TimeUnit.MILLISECONDS));
if (stats.getFatalException() != null) {
builder.field(ShardFollowStats.FATAL_EXCEPTION.getPreferredName());
builder.startObject();
{
ElasticsearchException.generateThrowableXContent(builder, ToXContent.EMPTY_PARAMS,
stats.getFatalException());
}
builder.endObject();
}
}
builder.endObject();
}
}
builder.endArray();
}
builder.endObject();
}
builder.endArray();
}
builder.endObject();
}
builder.endObject();
}

private static CcrStatsResponse createTestInstance() {
return new CcrStatsResponse(randomAutoFollowStats(), randomIndicesFollowStats());
}

private static AutoFollowStats randomAutoFollowStats() {
final int count = randomIntBetween(0, 16);
final NavigableMap<String, ElasticsearchException> readExceptions = new TreeMap<>();
for (int i = 0; i < count; i++) {
readExceptions.put("" + i, new ElasticsearchException(new IllegalStateException("index [" + i + "]")));
}
return new AutoFollowStats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
readExceptions
);
}

private static IndicesFollowStats randomIndicesFollowStats() {
int numIndices = randomIntBetween(0, 16);
NavigableMap<String, List<ShardFollowStats>> shardFollowStats = new TreeMap<>();
for (int i = 0; i < numIndices; i++) {
String index = randomAlphaOfLength(4);
int numShards = randomIntBetween(0, 5);
List<ShardFollowStats> stats = new ArrayList<>(numShards);
shardFollowStats.put(index, stats);
for (int j = 0; j < numShards; j++) {
final int count = randomIntBetween(0, 16);
final NavigableMap<Long, Tuple<Integer, ElasticsearchException>> readExceptions = new TreeMap<>();
for (long k = 0; k < count; k++) {
readExceptions.put(k, new Tuple<>(randomIntBetween(0, Integer.MAX_VALUE),
new ElasticsearchException(new IllegalStateException("index [" + k + "]"))));
}

stats.add(new ShardFollowStats(
randomAlphaOfLength(4),
randomAlphaOfLength(4),
randomAlphaOfLength(4),
randomInt(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomIntBetween(0, Integer.MAX_VALUE),
randomIntBetween(0, Integer.MAX_VALUE),
randomIntBetween(0, Integer.MAX_VALUE),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomLong(),
readExceptions,
randomBoolean() ? new ElasticsearchException("fatal error") : null));
}
}
return new IndicesFollowStats(shardFollowStats);
}

}
@ -33,10 +33,14 @@ import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ccr.AutoFollowStats;
import org.elasticsearch.client.ccr.CcrStatsRequest;
import org.elasticsearch.client.ccr.CcrStatsResponse;
import org.elasticsearch.client.ccr.DeleteAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.GetAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.GetAutoFollowPatternResponse;
import org.elasticsearch.client.ccr.GetAutoFollowPatternResponse.Pattern;
import org.elasticsearch.client.ccr.IndicesFollowStats;
import org.elasticsearch.client.ccr.PauseFollowRequest;
import org.elasticsearch.client.ccr.PutAutoFollowPatternRequest;
import org.elasticsearch.client.ccr.PutFollowRequest;
@ -568,6 +572,57 @@ public class CCRDocumentationIT extends ESRestHighLevelClientTestCase {
}
}

public void testGetCCRStats() throws Exception {
RestHighLevelClient client = highLevelClient();

// tag::ccr-get-stats-request
CcrStatsRequest request =
new CcrStatsRequest(); // <1>
// end::ccr-get-stats-request

// tag::ccr-get-stats-execute
CcrStatsResponse response = client.ccr()
.getCcrStats(request, RequestOptions.DEFAULT);
// end::ccr-get-stats-execute

// tag::ccr-get-stats-response
IndicesFollowStats indicesFollowStats =
response.getIndicesFollowStats(); // <1>
AutoFollowStats autoFollowStats =
response.getAutoFollowStats(); // <2>
// end::ccr-get-stats-response

// tag::ccr-get-stats-execute-listener
ActionListener<CcrStatsResponse> listener =
new ActionListener<CcrStatsResponse>() {
@Override
public void onResponse(CcrStatsResponse response) { // <1>
IndicesFollowStats indicesFollowStats =
response.getIndicesFollowStats();
AutoFollowStats autoFollowStats =
response.getAutoFollowStats();
}

@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::ccr-get-stats-execute-listener

// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);

// tag::ccr-get-stats-execute-async
client.ccr().getCcrStatsAsync(request,
RequestOptions.DEFAULT, listener); // <1>
// end::ccr-get-stats-execute-async

assertTrue(latch.await(30L, TimeUnit.SECONDS));
}


static Map<String, Object> toMap(Response response) throws IOException {
return XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false);
}
@ -34,6 +34,8 @@ import org.elasticsearch.client.watcher.ActionStatus;
import org.elasticsearch.client.watcher.ActionStatus.AckStatus;
import org.elasticsearch.client.watcher.DeactivateWatchRequest;
import org.elasticsearch.client.watcher.DeactivateWatchResponse;
import org.elasticsearch.client.watcher.ExecuteWatchRequest;
import org.elasticsearch.client.watcher.ExecuteWatchResponse;
import org.elasticsearch.client.watcher.GetWatchRequest;
import org.elasticsearch.client.watcher.GetWatchResponse;
import org.elasticsearch.client.watcher.StartWatchServiceRequest;
@ -43,6 +45,7 @@ import org.elasticsearch.client.watcher.WatcherStatsRequest;
import org.elasticsearch.client.watcher.WatcherStatsResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ObjectPath;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.client.watcher.DeleteWatchRequest;
import org.elasticsearch.client.watcher.DeleteWatchResponse;
@ -51,6 +54,7 @@ import org.elasticsearch.client.watcher.PutWatchResponse;
import org.elasticsearch.rest.RestStatus;

import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

@ -199,6 +203,52 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}

{
// tag::x-pack-execute-watch-by-id
ExecuteWatchRequest request = ExecuteWatchRequest.byId("my_watch_id");
request.setAlternativeInput("{ \"foo\" : \"bar\" }"); // <1>
request.setActionMode("action1", ExecuteWatchRequest.ActionExecutionMode.SIMULATE); // <2>
request.setRecordExecution(true); // <3>
request.setIgnoreCondition(true); // <4>
request.setTriggerData("{\"triggered_time\":\"now\"}"); // <5>
request.setDebug(true); // <6>
ExecuteWatchResponse response = client.watcher().executeWatch(request, RequestOptions.DEFAULT);
// end::x-pack-execute-watch-by-id

// tag::x-pack-execute-watch-by-id-response
String id = response.getRecordId(); // <1>
Map<String, Object> watch = response.getRecordAsMap(); // <2>
String watch_id = ObjectPath.eval("watch_record.watch_id", watch); // <3>
// end::x-pack-execute-watch-by-id-response
}

{
ExecuteWatchRequest request = ExecuteWatchRequest.byId("my_watch_id");
// tag::x-pack-execute-watch-by-id-execute-listener
ActionListener<ExecuteWatchResponse> listener = new ActionListener<ExecuteWatchResponse>() {
@Override
public void onResponse(ExecuteWatchResponse response) {
// <1>
}

@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::x-pack-execute-watch-by-id-execute-listener

// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);

// tag::x-pack-execute-watch-by-id-execute-async
client.watcher().executeWatchAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::x-pack-execute-watch-by-id-execute-async

assertTrue(latch.await(30L, TimeUnit.SECONDS));
}

{
//tag::get-watch-request
GetWatchRequest request = new GetWatchRequest("my_watch_id");
@ -285,6 +335,65 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
}
}

public void testExecuteInlineWatch() throws Exception {
RestHighLevelClient client = highLevelClient();

{
// tag::x-pack-execute-inline-watch
String watchJson = "{ \n" +
" \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
" \"input\": { \"none\": {} },\n" +
" \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
"}";
ExecuteWatchRequest request = ExecuteWatchRequest.inline(watchJson);
request.setAlternativeInput("{ \"foo\" : \"bar\" }"); // <1>
request.setActionMode("action1", ExecuteWatchRequest.ActionExecutionMode.SIMULATE); // <2>
request.setIgnoreCondition(true); // <3>
request.setTriggerData("{\"triggered_time\":\"now\"}"); // <4>
request.setDebug(true); // <5>
ExecuteWatchResponse response = client.watcher().executeWatch(request, RequestOptions.DEFAULT);
// end::x-pack-execute-inline-watch

// tag::x-pack-execute-watch-by-id-response
String id = response.getRecordId(); // <1>
Map<String, Object> watch = response.getRecordAsMap(); // <2>
String watch_id = ObjectPath.eval("watch_record.watch_id", watch); // <3>
// end::x-pack-execute-watch-by-id-response
}

{
String watchJson = "{ \n" +
" \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
" \"input\": { \"none\": {} },\n" +
" \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
"}";
ExecuteWatchRequest request = ExecuteWatchRequest.inline(watchJson);
// tag::x-pack-execute-inline-watch-execute-listener
ActionListener<ExecuteWatchResponse> listener = new ActionListener<ExecuteWatchResponse>() {
@Override
public void onResponse(ExecuteWatchResponse response) {
// <1>
}

@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::x-pack-execute-inline-watch-execute-listener

// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);

// tag::x-pack-execute-inline-watch-execute-async
client.watcher().executeWatchAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::x-pack-execute-inline-watch-execute-async

assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}

public void testAckWatch() throws Exception {
RestHighLevelClient client = highLevelClient();

@ -151,7 +151,7 @@ subprojects {
}

// sanity checks if archives can be extracted
final File archiveExtractionDir
File archiveExtractionDir
if (project.name.contains('tar')) {
archiveExtractionDir = new File(buildDir, 'tar-extracted')
} else {
@ -179,7 +179,7 @@ subprojects {
}
}

final Closure toolExists
Closure toolExists
if (project.name.contains('tar')) {
toolExists = tarExists
} else {
@ -192,7 +192,7 @@ subprojects {
dependsOn buildDist, checkExtraction
onlyIf toolExists
doLast {
final String licenseFilename
String licenseFilename = null
if (project.name.contains('oss-') || project.name == 'integ-test-zip') {
licenseFilename = "APACHE-LICENSE-2.0.txt"
} else {

@ -45,7 +45,7 @@ bwcVersions.forPreviousUnreleased { VersionCollection.UnreleasedVersionInfo unre

final String remote = System.getProperty("tests.bwc.remote", "elastic")

final boolean gitFetchLatest
boolean gitFetchLatest
final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true")
if ("true".equals(gitFetchLatestProperty)) {
gitFetchLatest = true

@ -359,7 +359,7 @@ subprojects {

// sanity checks if packages can be extracted
final File extractionDir = new File(buildDir, 'extracted')
final File packageExtractionDir
File packageExtractionDir
if (project.name.contains('deb')) {
packageExtractionDir = new File(extractionDir, 'deb-extracted')
} else {
@ -406,9 +406,9 @@ subprojects {
checkLicense {
onlyIf dpkgExists
doLast {
final Path copyrightPath
final String expectedLicense
final String licenseFilename
Path copyrightPath
String expectedLicense
String licenseFilename
if (project.name.contains('oss-')) {
copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright")
expectedLicense = "ASL-2.0"
@ -431,7 +431,7 @@ subprojects {
checkLicense {
onlyIf rpmExists
doLast {
final String licenseFilename
String licenseFilename
if (project.name.contains('oss-')) {
licenseFilename = "APACHE-LICENSE-2.0.txt"
} else {
@ -466,7 +466,7 @@ subprojects {
exec.commandLine 'dpkg-deb', '--info', "${ -> buildDist.outputs.files.filter(debFilter).singleFile}"
exec.standardOutput = output
doLast {
final String expectedLicense
String expectedLicense
if (project.name.contains('oss-')) {
expectedLicense = "ASL-2.0"
} else {
@ -502,8 +502,8 @@ subprojects {
exec.commandLine 'rpm', '-qp', '--queryformat', '%{License}', "${-> buildDist.outputs.files.singleFile}"
exec.standardOutput = output
doLast {
final String license = output.toString('UTF-8')
final String expectedLicense
String license = output.toString('UTF-8')
String expectedLicense
if (project.name.contains('oss-')) {
expectedLicense = "ASL 2.0"
} else {
docs/java-rest/high-level/ccr/get_stats.asciidoc (new file, 37 lines)
@ -0,0 +1,37 @@
--
:api: ccr-get-stats
:request: CcrStatsRequest
:response: CcrStatsResponse
--

[id="{upid}-{api}"]
=== Get CCR Stats API


[id="{upid}-{api}-request"]
==== Request

The Get CCR Stats API allows you to get statistics about index following and auto following.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> The request accepts no parameters.

[id="{upid}-{api}-response"]
==== Response

The returned +{response}+ always includes index follow statistics for all follower indices as well as
auto follow statistics.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> The follow stats of active follower indices.
<2> The auto follow stats of the cluster that has been queried.

include::../execution.asciidoc[]
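For readers who want to see how the pieces fit together end to end, the following is a minimal sketch of the synchronous flow, assuming a `RestHighLevelClient` configured elsewhere; the wrapper class and method name are illustrative only and not part of the client API.

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ccr.AutoFollowStats;
import org.elasticsearch.client.ccr.CcrStatsRequest;
import org.elasticsearch.client.ccr.CcrStatsResponse;
import org.elasticsearch.client.ccr.IndicesFollowStats;

import java.io.IOException;

// Illustrative wrapper only; not part of the high-level REST client API.
public final class CcrStatsExample {
    static CcrStatsResponse fetchCcrStats(RestHighLevelClient client) throws IOException {
        CcrStatsRequest request = new CcrStatsRequest();                          // the request takes no parameters
        CcrStatsResponse response = client.ccr().getCcrStats(request, RequestOptions.DEFAULT);
        IndicesFollowStats indicesFollowStats = response.getIndicesFollowStats(); // follow stats per follower index
        AutoFollowStats autoFollowStats = response.getAutoFollowStats();          // cluster-wide auto-follow stats
        return response;
    }
}
--------------------------------------------------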
@ -427,6 +427,7 @@ The Java High Level REST Client supports the following Watcher APIs:
* <<java-rest-high-watcher-deactivate-watch>>
* <<{upid}-ack-watch>>
* <<{upid}-activate-watch>>
* <<{upid}-execute-watch>>
* <<{upid}-watcher-stats>>

include::watcher/start-watch-service.asciidoc[]
@ -437,6 +438,7 @@ include::watcher/delete-watch.asciidoc[]
include::watcher/ack-watch.asciidoc[]
include::watcher/deactivate-watch.asciidoc[]
include::watcher/activate-watch.asciidoc[]
include::watcher/execute-watch.asciidoc[]
include::watcher/watcher-stats.asciidoc[]

== Graph APIs
@ -473,6 +475,7 @@ The Java High Level REST Client supports the following CCR APIs:
* <<{upid}-ccr-put-auto-follow-pattern>>
* <<{upid}-ccr-delete-auto-follow-pattern>>
* <<{upid}-ccr-get-auto-follow-pattern>>
* <<{upid}-ccr-get-stats>>

include::ccr/put_follow.asciidoc[]
include::ccr/pause_follow.asciidoc[]
@ -481,6 +484,7 @@ include::ccr/unfollow.asciidoc[]
include::ccr/put_auto_follow_pattern.asciidoc[]
include::ccr/delete_auto_follow_pattern.asciidoc[]
include::ccr/get_auto_follow_pattern.asciidoc[]
include::ccr/get_stats.asciidoc[]

== Index Lifecycle Management APIs
docs/java-rest/high-level/watcher/execute-watch.asciidoc (new file, 88 lines)
@ -0,0 +1,88 @@
--
:api: execute-watch
:request: ExecuteWatchRequest
:response: ExecuteWatchResponse
--
[id="{upid}-{api}"]
=== Execute Watch API

The execute watch API allows clients to immediately execute a watch, either
one that has been previously added via the
{ref}/put-watch.html[Put Watch API] or inline as part of the request.

[id="{upid}-{api}-request-by-id"]
==== Execute by id

Submit the following request to execute a previously added watch:

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[x-pack-execute-watch-by-id]
---------------------------------------------------
<1> Alternative input for the watch to use, in JSON format
<2> Set the mode for action "action1" to SIMULATE
<3> Record this execution in watcher history
<4> Execute the watch regardless of the watch's condition
<5> Set the trigger data for the watch, in JSON format
<6> Enable debug mode

[id="{upid}-{api}-response-by-id"]
==== Execute by id Response

The returned `Response` contains details of the execution:

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[x-pack-execute-watch-by-id-response]
---------------------------------------------------
<1> The record ID for this execution
<2> The execution response as a Java `Map`
<3> Extract information from the response map using `ObjectPath`

[id="{upid}-{api}-response-by-id-async"]
==== Asynchronous execution by id

This request can be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[x-pack-execute-watch-by-id-execute-async]
--------------------------------------------------
<1> The `ExecuteWatchRequest` to execute and the `ActionListener` to use when
the execution completes

The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for `ExecuteWatchResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[x-pack-execute-watch-by-id-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument


[id="{upid}-{api}-request-inline"]
==== Execute inline

Submit the following request to execute a watch defined as part of the request:

["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[x-pack-execute-watch-inline]
---------------------------------------------------
<1> Alternative input for the watch to use, in JSON format
<2> Set the mode for action "action1" to SIMULATE
<3> Execute the watch regardless of the watch's condition
<4> Set the trigger data for the watch, in JSON format
<5> Enable debug mode

Note that inline watches cannot be recorded.

The response format and asynchronous execution methods are the same as for the
Execute Watch by ID API.
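As a compact illustration of the execute-by-id flow described above, here is a hedged sketch that simulates a single action and extracts the watch id from the execution record; the helper class, method name, and parameters are illustrative only and assume a client configured elsewhere.

["source","java"]
---------------------------------------------------
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.watcher.ExecuteWatchRequest;
import org.elasticsearch.client.watcher.ExecuteWatchResponse;
import org.elasticsearch.common.xcontent.ObjectPath;

import java.io.IOException;
import java.util.Map;

// Illustrative helper only; not part of the high-level REST client API.
public final class ExecuteWatchExample {
    static String executeAndGetWatchId(RestHighLevelClient client, String watchId, String actionId) throws IOException {
        ExecuteWatchRequest request = ExecuteWatchRequest.byId(watchId);
        request.setActionMode(actionId, ExecuteWatchRequest.ActionExecutionMode.SIMULATE); // do not run the real action
        request.setIgnoreCondition(true);                                                  // force the watch to execute
        ExecuteWatchResponse response = client.watcher().executeWatch(request, RequestOptions.DEFAULT);
        Map<String, Object> record = response.getRecordAsMap();                            // execution record as a Map
        return ObjectPath.eval("watch_record.watch_id", record);                           // navigate the record with ObjectPath
    }
}
---------------------------------------------------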
@ -200,13 +200,13 @@ now returns matches from the new index:
},
"hits": {
"total": 1,
"max_score": 0.2876821,
"max_score": 0.13076457,
"hits": [
{
"_index": "new_index", <1>
"_type": "_doc",
"_id": "1",
"_score": 0.2876821,
"_score": 0.13076457,
"_source": {
"query": {
"match": {
@ -395,13 +395,13 @@ This results in a response like this:
},
"hits": {
"total": 1,
"max_score": 0.2876821,
"max_score": 0.13076457,
"hits": [
{
"_index": "test_index",
"_type": "_doc",
"_id": "1",
"_score": 0.2876821,
"_score": 0.13076457,
"_source": {
"query": {
"match": {
@ -554,13 +554,13 @@ GET /my_queries1/_search
},
"hits": {
"total": 1,
"max_score": 0.41501677,
"max_score": 0.18864399,
"hits": [
{
"_index": "my_queries1",
"_type": "_doc",
"_id": "1",
"_score": 0.41501677,
"_score": 0.18864399,
"_source": {
"query": {
"term": {

@ -90,13 +90,13 @@ The above request will yield the following response:
},
"hits": {
"total": 1,
"max_score": 0.5753642,
"max_score": 0.26152915,
"hits": [
{ <1>
"_index": "my-index",
"_type": "_doc",
"_id": "1",
"_score": 0.5753642,
"_score": 0.26152915,
"_source": {
"query": {
"match": {
@ -238,13 +238,13 @@ GET /my-index/_search
},
"hits": {
"total": 1,
"max_score": 1.5606477,
"max_score": 0.7093853,
"hits": [
{
"_index": "my-index",
"_type": "_doc",
"_id": "1",
"_score": 1.5606477,
"_score": 0.7093853,
"_source": {
"query": {
"match": {
@ -418,13 +418,13 @@ This will yield the following response.
},
"hits": {
"total": 2,
"max_score": 0.5753642,
"max_score": 0.26152915,
"hits": [
{
"_index": "my-index",
"_type": "_doc",
"_id": "3",
"_score": 0.5753642,
"_score": 0.26152915,
"_source": {
"query": {
"match": {
@ -445,7 +445,7 @@ This will yield the following response.
"_index": "my-index",
"_type": "_doc",
"_id": "4",
"_score": 0.5753642,
"_score": 0.26152915,
"_source": {
"query": {
"match": {
@ -523,13 +523,13 @@ The slightly different response:
},
"hits": {
"total": 1,
"max_score": 1.5606477,
"max_score": 0.7093853,
"hits": [
{
"_index": "my-index",
"_type": "_doc",
"_id": "1",
"_score": 1.5606477,
"_score": 0.7093853,
"_source": {
"query": {
"match": {
@ -619,13 +619,13 @@ The above search request returns a response similar to this:
},
"hits": {
"total": 1,
"max_score": 0.5753642,
"max_score": 0.26152915,
"hits": [
{
"_index": "my-index",
"_type": "_doc",
"_id": "1",
"_score": 0.5753642,
"_score": 0.26152915,
"_source": {
"query": {
"match": {
@ -43,7 +43,7 @@ This will yield the following result:
"details":[
{
"value":2.2,
"description":"scaling factor, k1 + 1",
"description":"boost",
"details":[]
},
{
gradle/wrapper/gradle-wrapper.jar (binary file, not shown)

gradle/wrapper/gradle-wrapper.properties
@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-5.0-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionSha256Sum=fc049dcbcb245d5892bebae143bd515a78f6a5a93cec99d489b312dc0ce4aad9
distributionSha256Sum=17847c8e12b2bcfce26a79f425f082c31d4ded822f99a66127eee2d96bf18216

gradlew
@ -28,7 +28,7 @@ APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
DEFAULT_JVM_OPTS='"-Xmx64m"'

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

gradlew.bat (binary file, not shown)
@ -99,7 +99,7 @@ public class WordDelimiterGraphTokenFilterFactory extends AbstractTokenFilterFac

@Override
public TokenStream create(TokenStream tokenStream) {
return new WordDelimiterGraphFilter(tokenStream, charTypeTable, flags, protoWords);
return new WordDelimiterGraphFilter(tokenStream, true, charTypeTable, flags, protoWords);
}

@Override
@ -1 +0,0 @@
65b85d26f4eb4d23b98aaeffc9b1054c23d0227b
@ -0,0 +1 @@
4a1574a3d3fcb950b440e36b3035f90885794bbf

@ -107,7 +107,7 @@ public class PainlessExecuteApiTests extends ESSingleNodeTestCase {
"Math.round((_score + (doc['rank'].value / params.max_rank)) * 100.0) / 100.0", singletonMap("max_rank", 5.0)), "score",
contextSetup);
Response response = innerShardOperation(request, scriptService, indexService);
assertThat(response.getResult(), equalTo(1.09D));
assertThat(response.getResult(), equalTo(0.93D));
}

}
@ -21,6 +21,7 @@ package org.elasticsearch.index.reindex;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.common.io.stream.Writeable;

public class RethrottleAction extends Action<ListTasksResponse> {
public static final RethrottleAction INSTANCE = new RethrottleAction();
@ -32,6 +33,11 @@ public class RethrottleAction extends Action<ListTasksResponse> {

@Override
public ListTasksResponse newResponse() {
return new ListTasksResponse();
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}

@Override
public Writeable.Reader<ListTasksResponse> getResponseReader() {
return ListTasksResponse::new;
}
}
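The hunk above is part of the migration away from `Streamable`: `newResponse()` now throws, and response deserialization is expected to go through the `Writeable.Reader` returned by `getResponseReader()`. Below is a minimal, illustrative sketch (not part of this commit) of how such a reader is applied to a `StreamInput`; the helper class and method name are hypothetical.

import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;

// Hypothetical sketch of the Writeable.Reader pattern adopted by the change above.
final class ReaderSketch {
    static ListTasksResponse read(StreamInput in) throws IOException {
        Writeable.Reader<ListTasksResponse> reader = ListTasksResponse::new; // constructor taking StreamInput acts as the reader
        return reader.read(in);                                              // deserialize the response from the stream
    }
}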
@ -29,13 +29,11 @@ import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.List;

public class TransportRethrottleAction extends TransportTasksAction<BulkByScrollTask, RethrottleRequest, ListTasksResponse, TaskInfo> {
@ -45,7 +43,7 @@ public class TransportRethrottleAction extends TransportTasksAction<BulkByScroll
public TransportRethrottleAction(ClusterService clusterService, TransportService transportService,
ActionFilters actionFilters, Client client) {
super(RethrottleAction.NAME, clusterService, transportService, actionFilters,
RethrottleRequest::new, ListTasksResponse::new, ThreadPool.Names.MANAGEMENT);
RethrottleRequest::new, ListTasksResponse::new, TaskInfo::new, ThreadPool.Names.MANAGEMENT);
this.client = client;
}

@ -101,11 +99,6 @@ public class TransportRethrottleAction extends TransportTasksAction<BulkByScroll
listener.onResponse(task.taskInfo(localNodeId, true));
}

@Override
protected TaskInfo readTaskResponse(StreamInput in) throws IOException {
return new TaskInfo(in);
}

@Override
protected ListTasksResponse newResponse(RethrottleRequest request, List<TaskInfo> tasks,
List<TaskOperationFailure> taskOperationFailures, List<FailedNodeException> failedNodeExceptions) {
@ -1 +0,0 @@
2c31180c0afaf7ce10244175c68a9189e57b456b
@ -0,0 +1 @@
428b4a9e84b4e903dfadb4dd1e1ef2cdd98cce08
@ -1 +0,0 @@
d39dee7d510aecb9437a1e438ec19cf4398d8792
@ -0,0 +1 @@
d08ee1049d04f672175ea9ba3132f7eaa98d9742
@ -1 +0,0 @@
1f3ce32163fbf344f82d18b61715dc0891c22e00
@ -0,0 +1 @@
841a9bd3a0e12b15b700c0655a76e4035d3128ae
@ -1 +0,0 @@
6d378fb5b5a904cd3e3a1b1f3bab8b7c5cbc9d85
@ -0,0 +1 @@
e9bfd4935d1a5d55154cb99a066a03797174bc33
@ -1 +0,0 @@
df4957389f85da32b553dd901f30767879a507f2
@ -0,0 +1 @@
6a933a5113a708229177463c94d53ea544414a53
@ -1 +0,0 @@
210ea4e9423e03cd3f6ea9b8e81cab727101d3cb
@ -0,0 +1 @@
7709b470601b0c1a77fdcd5dd9ce9f48aba3db78
@ -1 +0,0 @@
3c345959ae03ae458be1590c2ac782b2a621abb2
@ -0,0 +1 @@
00062c609614d7229c5869d7d8988674ffaea350
@ -301,6 +301,7 @@ if (secureFixtureSupported) {
// Security tests unsupported. Don't run these tests.
integTestSecure.enabled = false
integTestSecureHa.enabled = false
testingConventions.enabled = false
}

thirdPartyAudit.excludes = [
@ -73,10 +73,7 @@ task testRepositoryCreds(type: RandomizedTestingTask) {
include '**/RepositoryCredentialsTests.class'
include '**/S3BlobStoreRepositoryTests.class'
systemProperty 'es.allow_insecure_settings', 'true'
classpath = tasks.test.classpath
testClassesDirs = tasks.test.testClassesDirs
}
project.check.dependsOn(testRepositoryCreds)

test {
// these are tested explicitly in separate test tasks
@ -145,8 +142,8 @@ final String minioBinDir = "${buildDir}/minio/bin"
final String minioDataDir = "${buildDir}/minio/data"
final String minioAddress = "127.0.0.1"

final String minioDistribution
final String minioCheckSum
String minioDistribution
String minioCheckSum
if (Os.isFamily(Os.FAMILY_MAC)) {
minioDistribution = 'darwin-amd64'
minioCheckSum = '96b0bcb2f590e8e65fb83d5c3e221f9bd1106b49fa6f22c6b726b80b845d7c60'
@ -38,7 +38,7 @@ repositories {
// the Wildfly distribution is not available via a repository, so we fake an Ivy repository on top of the download site
ivy {
url "http://download.jboss.org"
layout 'pattern', {
patternLayout {
artifact 'wildfly/[revision]/[module]-[revision].[ext]'
}
}
@ -320,10 +320,6 @@ if (isEclipse == false || project.path == ":server-tests") {
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Multi-node tests',
dependsOn: test.dependsOn) {
classpath = project.test.classpath
testClassesDirs = project.test.testClassesDirs
include '**/*IT.class'
}
check.dependsOn integTest
integTest.mustRunAfter test
}
@ -1 +0,0 @@
1e557f096cd55fd1f20104b1fb4c0d0095e03fd2
@ -0,0 +1 @@
ada03def6399ef5606a77c93ee45514701b98987
@ -1 +0,0 @@
77c1844fd0b17e26fb4facb94f6140e98a6bbd49
@ -0,0 +1 @@
c21b7cb3d2a3f34ea73b915cc15c67f203876ddf
@ -1 +0,0 @@
20b559db91bda12f7b242c516915aad26e654baa
@ -0,0 +1 @@
a6149ea94d695ebad4e5037f2926ca20c768777d
@ -1 +0,0 @@
24e4eb6703be36c910bd0d7e3f060259602131b8
@ -0,0 +1 @@
88de707d0913f9240114091a22bc178627792de3
@ -1 +0,0 @@
1a9acefd0d7a9348f62fb0ea307853fe06cebc63
@ -0,0 +1 @@
9812a19bdccd3646fde3db3ed53ce17c8ecd2c72
@ -1 +0,0 @@
941fa34281837c5d2a62d67657618b4d6e92c6d7
@ -0,0 +1 @@
9877d38f3f966352812888014b9dd0fcd861b418
@ -1 +0,0 @@
eb78318f2a76b2013857ba72e0ddc42141bad36e
@ -0,0 +1 @@
2ae87d38ad6b9f349de1a14c9fa2bc36d1e1126e
@ -1 +0,0 @@
ce90ede863c08726d7ae70f9f15443f122674d89
@ -0,0 +1 @@
cb167b153ee422e222b314fb1aacf07742079b18
@ -1 +0,0 @@
e3b889834b8b43f3c5b718ee0b1b2fd198aa9467
@ -0,0 +1 @@
5461afee0210ce1d2e9336e0a3f94ea7da64e491
@ -1 +0,0 @@
f4c6c02a0834d582a918c895a715a74f40195297
@ -0,0 +1 @@
28fd369ca80e1bee4a9830723348363850f25f91
@ -1 +0,0 @@
7ed65e999af74d9356180c91176bcf0bcdf80b6a
@ -0,0 +1 @@
7139424ecadad80df8127497f06d08d037c5e9cd
@ -1 +0,0 @@
28a64cb272639b610064291e726f2a1792c224f2
@ -0,0 +1 @@
82f9b91f2e288af0b9cee8ccc561655f9d07ed70
Some files were not shown because too many files have changed in this diff.