Merge branch 'master' into close-index-api-refactoring

Commit 7f6fe14b66
@@ -223,6 +223,18 @@ if (project != rootProject) {
     integTestClass = 'org.elasticsearch.gradle.test.GradleIntegrationTestCase'
   }
 
+  testingConventions {
+    naming.clear()
+    naming {
+      Tests {
+        baseClass 'org.elasticsearch.gradle.test.GradleUnitTestCase'
+      }
+      IT {
+        baseClass 'org.elasticsearch.gradle.test.GradleIntegrationTestCase'
+      }
+    }
+  }
+
   /*
    * We already configure publication and we don't need or want this one that
    * comes from the java-gradle-plugin.
@@ -78,6 +78,19 @@ public class PluginBuildPlugin extends BuildPlugin {
                 skipIntegTestInDisguise = true
             }
         }
+        project.testingConventions {
+            naming.clear()
+            naming {
+                Tests {
+                    baseClass 'org.apache.lucene.util.LuceneTestCase'
+                }
+                IT {
+                    baseClass 'org.elasticsearch.test.ESIntegTestCase'
+                    baseClass 'org.elasticsearch.test.rest.ESRestTestCase'
+                    baseClass 'org.elasticsearch.test.ESSingleNodeTestCase'
+                }
+            }
+        }
         createIntegTestTask(project)
         createBundleTask(project)
         project.configurations.getByName('default').extendsFrom(project.configurations.getByName('runtime'))
@@ -91,7 +91,17 @@ class PrecommitTasks {
    }
 
    static Task configureTestingConventions(Project project) {
-        project.getTasks().create("testingConventions", TestingConventionsTasks.class)
+        TestingConventionsTasks task = project.getTasks().create("testingConventions", TestingConventionsTasks.class)
+        task.naming {
+            Tests {
+                baseClass "org.apache.lucene.util.LuceneTestCase"
+            }
+            IT {
+                baseClass "org.elasticsearch.test.ESIntegTestCase"
+                baseClass 'org.elasticsearch.test.rest.ESRestTestCase'
+            }
+        }
+        return task
    }
 
    private static Task configureJarHell(Project project) {
@@ -0,0 +1,99 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.gradle.precommit;
+
+import java.io.Serializable;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Objects;
+import java.util.Set;
+import java.util.regex.Pattern;
+
+/**
+ * Represents rules for tests enforced by the {@link TestingConventionsTasks}.
+ *
+ * Rules are identified by name; tests must have this name as a suffix, implement one of the base classes
+ * and be part of all the specified tasks.
+ */
+public class TestingConventionRule implements Serializable {
+
+    private final String suffix;
+
+    private Set<String> baseClasses = new HashSet<>();
+
+    private Set<Pattern> taskNames = new HashSet<>();
+
+    public TestingConventionRule(String suffix) {
+        this.suffix = suffix;
+    }
+
+    public String getSuffix() {
+        return suffix;
+    }
+
+    /**
+     * Alias for {@link #getSuffix()}, as Gradle requires a name property.
+     */
+    public String getName() {
+        return suffix;
+    }
+
+    public void baseClass(String clazz) {
+        baseClasses.add(clazz);
+    }
+
+    public void setBaseClasses(Collection<String> baseClasses) {
+        this.baseClasses.clear();
+        this.baseClasses.addAll(baseClasses);
+    }
+
+    public void taskName(Pattern expression) {
+        taskNames.add(expression);
+    }
+    public void taskName(String expression) {
+        taskNames.add(Pattern.compile(expression));
+    }
+
+    public void setTaskNames(Collection<Pattern> expressions) {
+        taskNames.clear();
+        taskNames.addAll(expressions);
+    }
+
+    public Set<String> getBaseClasses() {
+        return baseClasses;
+    }
+
+    public Set<Pattern> getTaskNames() {
+        return taskNames;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        TestingConventionRule that = (TestingConventionRule) o;
+        return Objects.equals(suffix, that.suffix);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(suffix);
+    }
+}
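Note on the class above: equality and hashing use the suffix alone, which is what lets Gradle's NamedDomainObjectContainer keep exactly one rule per name. A minimal sketch (not part of the commit) exercising that behavior:

    import java.util.regex.Pattern;

    // Sketch only: exercises TestingConventionRule as defined in the diff above.
    public class TestingConventionRuleSketch {
        public static void main(String[] args) {
            TestingConventionRule it = new TestingConventionRule("IT");
            it.baseClass("org.elasticsearch.test.ESIntegTestCase");
            it.baseClass("org.elasticsearch.test.rest.ESRestTestCase");
            it.taskName(Pattern.compile(".*integTest"));

            // Identity is the suffix alone; base classes do not affect equality,
            // so a container sees this as the same named entry.
            System.out.println(it.equals(new TestingConventionRule("IT"))); // true
            System.out.println(it.getBaseClasses().size());                 // 2
        }
    }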
@@ -18,8 +18,10 @@
  */
 package org.elasticsearch.gradle.precommit;
 
+import groovy.lang.Closure;
 import org.elasticsearch.gradle.tool.Boilerplate;
 import org.gradle.api.DefaultTask;
+import org.gradle.api.NamedDomainObjectContainer;
 import org.gradle.api.Task;
 import org.gradle.api.file.FileCollection;
 import org.gradle.api.file.FileTree;
@@ -54,50 +56,37 @@ import java.util.stream.Stream;
 
 public class TestingConventionsTasks extends DefaultTask {
 
-    private static final String TEST_CLASS_SUFIX = "Tests";
-    private static final String INTEG_TEST_CLASS_SUFIX = "IT";
     private static final String TEST_METHOD_PREFIX = "test";
 
-    /**
-     * Are there tests to execute ? Accounts for @Ignore and @AwaitsFix
-     */
-    private Boolean activeTestsExists;
-
     private Map<String, File> testClassNames;
 
+    private final NamedDomainObjectContainer<TestingConventionRule> naming;
+
     public TestingConventionsTasks() {
         setDescription("Tests various testing conventions");
         // Run only after everything is compiled
         Boilerplate.getJavaSourceSets(getProject()).all(sourceSet -> dependsOn(sourceSet.getClassesTaskName()));
+        naming = getProject().container(TestingConventionRule.class);
     }
 
     @Input
-    public Map<String, Set<File>> classFilesPerTask(FileTree testClassFiles) {
+    public Map<String, Set<File>> classFilesPerEnabledTask(FileTree testClassFiles) {
         Map<String, Set<File>> collector = new HashMap<>();
 
         // RandomizedTestingTask
         collector.putAll(
-            Stream.concat(
-                getProject().getTasks().withType(getRandomizedTestingTask()).stream(),
-                // Look at sub-projects too. As sometimes tests are implemented in parent but ran in sub-projects against
-                // different configurations
-                getProject().getSubprojects().stream().flatMap(subproject ->
-                    subproject.getTasks().withType(getRandomizedTestingTask()).stream()
-                )
-            )
+            getProject().getTasks().withType(getRandomizedTestingTask()).stream()
                 .filter(Task::getEnabled)
                 .collect(Collectors.toMap(
                     Task::getPath,
                     task -> testClassFiles.matching(getRandomizedTestingPatternSet(task)).getFiles()
-                ))
+                )
+            )
         );
 
         // Gradle Test
         collector.putAll(
-            Stream.concat(
-                getProject().getTasks().withType(Test.class).stream(),
-                getProject().getSubprojects().stream().flatMap(subproject ->
-                    subproject.getTasks().withType(Test.class).stream()
-                )
-            )
+            getProject().getTasks().withType(Test.class).stream()
                 .filter(Task::getEnabled)
                 .collect(Collectors.toMap(
                     Task::getPath,
@@ -119,14 +108,22 @@ public class TestingConventionsTasks extends DefaultTask {
         return testClassNames;
     }
 
+    @Input
+    public NamedDomainObjectContainer<TestingConventionRule> getNaming() {
+        return naming;
+    }
+
     @OutputFile
     public File getSuccessMarker() {
         return new File(getProject().getBuildDir(), "markers/" + getName());
     }
 
+    public void naming(Closure<TestingConventionRule> action) {
+        naming.configure(action);
+    }
+
     @TaskAction
     public void doCheck() throws IOException {
-        activeTestsExists = false;
         final String problems;
 
         try (URLClassLoader isolatedClassLoader = new URLClassLoader(
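The container exposed by getNaming() above is what the Groovy naming { ... } blocks configure. A hedged sketch of the same configuration done from Java, assuming a Project reference named project (NamedDomainObjectContainer.create(String, Action) is standard Gradle API):

    // Sketch only: configure the naming rules without the Groovy DSL.
    TestingConventionsTasks task =
        project.getTasks().create("testingConventions", TestingConventionsTasks.class);
    task.getNaming().create("Tests", rule ->
        rule.baseClass("org.apache.lucene.util.LuceneTestCase"));
    task.getNaming().create("IT", rule -> {
        rule.baseClass("org.elasticsearch.test.ESIntegTestCase");
        rule.baseClass("org.elasticsearch.test.rest.ESRestTestCase");
    });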
@@ -134,62 +131,83 @@
         )) {
             Predicate<Class<?>> isStaticClass = clazz -> Modifier.isStatic(clazz.getModifiers());
             Predicate<Class<?>> isPublicClass = clazz -> Modifier.isPublic(clazz.getModifiers());
-            Predicate<Class<?>> implementsNamingConvention = clazz ->
-                clazz.getName().endsWith(TEST_CLASS_SUFIX) ||
-                clazz.getName().endsWith(INTEG_TEST_CLASS_SUFIX);
+            Predicate<Class<?>> isAbstractClass = clazz -> Modifier.isAbstract(clazz.getModifiers());
 
-            Map<File, ? extends Class<?>> classes = getTestClassNames().entrySet().stream()
+            final Map<File, ? extends Class<?>> classes = getTestClassNames().entrySet().stream()
                 .collect(Collectors.toMap(
                     Map.Entry::getValue,
                     entry -> loadClassWithoutInitializing(entry.getKey(), isolatedClassLoader))
                 );
 
-            FileTree allTestClassFiles = getProject().files(
+            final FileTree allTestClassFiles = getProject().files(
                 classes.values().stream()
                     .filter(isStaticClass.negate())
                     .filter(isPublicClass)
-                    .filter(implementsNamingConvention)
+                    .filter((Predicate<Class<?>>) this::implementsNamingConvention)
                     .map(clazz -> testClassNames.get(clazz.getName()))
                     .collect(Collectors.toList())
             ).getAsFileTree();
 
-            final Map<String, Set<File>> classFilesPerTask = classFilesPerTask(allTestClassFiles);
+            final Map<String, Set<File>> classFilesPerTask = classFilesPerEnabledTask(allTestClassFiles);
 
-            Map<String, Set<Class<?>>> testClassesPerTask = classFilesPerTask.entrySet().stream()
+            final Map<String, Set<Class<?>>> testClassesPerTask = classFilesPerTask.entrySet().stream()
                 .collect(
                     Collectors.toMap(
                         Map.Entry::getKey,
                         entry -> entry.getValue().stream()
                             .map(classes::get)
-                            .filter(implementsNamingConvention)
+                            .filter(this::implementsNamingConvention)
                             .collect(Collectors.toSet())
                     )
                 );
 
+            final Map<String, Set<Class<?>>> suffixToBaseClass;
+            if (classes.isEmpty()) {
+                // Don't load base classes if we don't have any tests.
+                // This allows defaults to be configured for projects that don't have any tests
+                suffixToBaseClass = Collections.emptyMap();
+            } else {
+                suffixToBaseClass = naming.stream()
+                    .collect(
+                        Collectors.toMap(
+                            TestingConventionRule::getSuffix,
+                            rule -> rule.getBaseClasses().stream()
+                                .map(each -> loadClassWithoutInitializing(each, isolatedClassLoader))
+                                .collect(Collectors.toSet())
+                        ));
+            }
+
             problems = collectProblems(
                 checkNoneExists(
                     "Test classes implemented by inner classes will not run",
                     classes.values().stream()
                         .filter(isStaticClass)
-                        .filter(implementsNamingConvention.or(this::seemsLikeATest))
+                        .filter(isPublicClass)
+                        .filter(((Predicate<Class<?>>) this::implementsNamingConvention).or(this::seemsLikeATest))
                 ),
                 checkNoneExists(
                     "Seem like test classes but don't match naming convention",
                     classes.values().stream()
                         .filter(isStaticClass.negate())
                         .filter(isPublicClass)
-                        .filter(this::seemsLikeATest)
-                        .filter(implementsNamingConvention.negate())
+                        .filter(isAbstractClass.negate())
+                        .filter(this::seemsLikeATest) // TODO when base classes are set, check for classes that extend them
+                        .filter(((Predicate<Class<?>>) this::implementsNamingConvention).negate())
                 ),
+                // TODO: check for non public classes that seem like tests
+                // TODO: check for abstract classes that implement the naming conventions
+                // No empty enabled tasks
                 collectProblems(
                     testClassesPerTask.entrySet().stream()
-                        .map( entry ->
+                        .map(entry ->
                             checkAtLeastOneExists(
-                                "test class in " + entry.getKey(),
+                                "test class included in task " + entry.getKey(),
                                 entry.getValue().stream()
                             )
                         )
-                        .collect(Collectors.joining())
+                        .sorted()
+                        .collect(Collectors.joining("\n"))
                 ),
                 checkNoneExists(
                     "Test classes are not included in any enabled task (" +
|
|||||||
.anyMatch(fileSet -> fileSet.contains(testFile)) == false
|
.anyMatch(fileSet -> fileSet.contains(testFile)) == false
|
||||||
)
|
)
|
||||||
.map(classes::get)
|
.map(classes::get)
|
||||||
|
),
|
||||||
|
collectProblems(
|
||||||
|
suffixToBaseClass.entrySet().stream()
|
||||||
|
.filter(entry -> entry.getValue().isEmpty() == false)
|
||||||
|
.map(entry -> {
|
||||||
|
return checkNoneExists(
|
||||||
|
"Tests classes with suffix `" + entry.getKey() + "` should extend " +
|
||||||
|
entry.getValue().stream().map(Class::getName).collect(Collectors.joining(" or ")) +
|
||||||
|
" but the following classes do not",
|
||||||
|
classes.values().stream()
|
||||||
|
.filter(clazz -> clazz.getName().endsWith(entry.getKey()))
|
||||||
|
.filter(clazz -> entry.getValue().stream()
|
||||||
|
.anyMatch(test -> test.isAssignableFrom(clazz)) == false)
|
||||||
|
);
|
||||||
|
}).sorted()
|
||||||
|
.collect(Collectors.joining("\n"))
|
||||||
)
|
)
|
||||||
|
// TODO: check that the testing tasks are included in the right task based on the name ( from the rule )
|
||||||
|
// TODO: check to make sure that the main source set doesn't have classes that match
|
||||||
|
// the naming convention (just the names, don't load classes)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (problems.isEmpty()) {
|
if (problems.isEmpty()) {
|
||||||
getLogger().error(problems);
|
|
||||||
throw new IllegalStateException("Testing conventions are not honored");
|
|
||||||
} else {
|
|
||||||
getSuccessMarker().getParentFile().mkdirs();
|
getSuccessMarker().getParentFile().mkdirs();
|
||||||
Files.write(getSuccessMarker().toPath(), new byte[]{}, StandardOpenOption.CREATE);
|
Files.write(getSuccessMarker().toPath(), new byte[]{}, StandardOpenOption.CREATE);
|
||||||
|
} else {
|
||||||
|
getLogger().error(problems);
|
||||||
|
throw new IllegalStateException("Testing conventions are not honored");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private String collectProblems(String... problems) {
|
private String collectProblems(String... problems) {
|
||||||
return Stream.of(problems)
|
return Stream.of(problems)
|
||||||
.map(String::trim)
|
.map(String::trim)
|
||||||
.filter(String::isEmpty)
|
.filter(s -> s.isEmpty() == false)
|
||||||
.map(each -> each + "\n")
|
.collect(Collectors.joining("\n"));
|
||||||
.collect(Collectors.joining());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("unchecked")
|
@SuppressWarnings("unchecked")
|
||||||
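The hunk above also inverts a bug: the old code logged and threw when problems was empty. A stand-alone sketch of the corrected collectProblems behavior (the merged logic copied verbatim into a runnable harness):

    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    // Sketch: blank entries are dropped and the rest joined with newlines,
    // so a fully clean run collapses to the empty string.
    final class CollectProblemsSketch {
        static String collectProblems(String... problems) {
            return Stream.of(problems)
                .map(String::trim)
                .filter(s -> s.isEmpty() == false)
                .collect(Collectors.joining("\n"));
        }

        public static void main(String[] args) {
            System.out.println(collectProblems("", "  ", "").isEmpty()); // true -> success marker written
            System.out.println(collectProblems("A", "", "B"));           // "A\nB" -> build fails
        }
    }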
@@ -251,10 +287,11 @@
     private String checkNoneExists(String message, Stream<? extends Class<?>> stream) {
         String problem = stream
             .map(each -> " * " + each.getName())
+            .sorted()
             .collect(Collectors.joining("\n"));
         if (problem.isEmpty() == false) {
             return message + ":\n" + problem;
-        } else{
+        } else {
             return "";
         }
     }
@@ -263,28 +300,33 @@
         if (stream.findAny().isPresent()) {
             return "";
         } else {
-            return "Expected at least one " + message + ", but found none.\n";
+            return "Expected at least one " + message + ", but found none.";
         }
     }
 
     private boolean seemsLikeATest(Class<?> clazz) {
         try {
             ClassLoader classLoader = clazz.getClassLoader();
-            Class<?> junitTest;
-            try {
-                junitTest = classLoader.loadClass("junit.framework.Test");
-            } catch (ClassNotFoundException e) {
-                throw new IllegalStateException("Could not load junit.framework.Test. It's expected that this class is " +
-                    "available on the tests classpath");
-            }
+            Class<?> junitTest = loadClassWithoutInitializing("org.junit.Assert", classLoader);
             if (junitTest.isAssignableFrom(clazz)) {
-                getLogger().info("{} is a test because it extends junit.framework.Test", clazz.getName());
+                getLogger().info("{} is a test because it extends {}", clazz.getName(), junitTest.getName());
                 return true;
             }
+
+            Class<?> junitAnnotation = loadClassWithoutInitializing("org.junit.Test", classLoader);
             for (Method method : clazz.getMethods()) {
-                if (matchesTestMethodNamingConvention(clazz, method)) return true;
-                if (isAnnotated(clazz, method, junitTest)) return true;
+                if (matchesTestMethodNamingConvention(method)) {
+                    getLogger().info("{} is a test because it has method named '{}'", clazz.getName(), method.getName());
+                    return true;
+                }
+                if (isAnnotated(method, junitAnnotation)) {
+                    getLogger().info("{} is a test because it has method '{}' annotated with '{}'",
+                        clazz.getName(), method.getName(), junitAnnotation.getName());
+                    return true;
+                }
             }
+
             return false;
         } catch (NoClassDefFoundError e) {
             // Include the message to get a more useful message when running Gradle without -s
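The detection heuristic now keys off JUnit 4 types instead of junit.framework.Test. A self-contained sketch of the same three checks (assumes JUnit 4 on the classpath; loadClassWithoutInitializing is inlined here as Class.forName with initialize=false):

    import java.lang.annotation.Annotation;
    import java.lang.reflect.Method;
    import java.lang.reflect.Modifier;

    // Sketch of the merged heuristic: a class "seems like a test" if it
    // extends org.junit.Assert, has a non-static test* method, or has a
    // method annotated with org.junit.Test.
    final class SeemsLikeATestSketch {
        static boolean seemsLikeATest(Class<?> clazz) throws ClassNotFoundException {
            ClassLoader loader = clazz.getClassLoader();
            Class<?> junitAssert = Class.forName("org.junit.Assert", false, loader);
            if (junitAssert.isAssignableFrom(clazz)) {
                return true;
            }
            Class<?> junitAnnotation = Class.forName("org.junit.Test", false, loader);
            for (Method method : clazz.getMethods()) {
                if (method.getName().startsWith("test") && Modifier.isStatic(method.getModifiers()) == false) {
                    return true;
                }
                for (Annotation annotation : method.getAnnotations()) {
                    if (junitAnnotation.isAssignableFrom(annotation.getClass())) {
                        return true;
                    }
                }
            }
            return false;
        }
    }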
@@ -294,23 +336,25 @@
         }
     }
 
-    private boolean matchesTestMethodNamingConvention(Class<?> clazz, Method method) {
-        if (method.getName().startsWith(TEST_METHOD_PREFIX) &&
-            Modifier.isStatic(method.getModifiers()) == false &&
-            method.getReturnType().equals(Void.class)
-        ) {
-            getLogger().info("{} is a test because it has method: {}", clazz.getName(), method.getName());
+    private boolean implementsNamingConvention(Class<?> clazz) {
+        if (naming.stream()
+            .map(TestingConventionRule::getSuffix)
+            .anyMatch(suffix -> clazz.getName().endsWith(suffix))) {
+            getLogger().info("{} is a test because it matches the naming convention", clazz.getName());
             return true;
         }
         return false;
     }
 
-    private boolean isAnnotated(Class<?> clazz, Method method, Class<?> annotation) {
+    private boolean matchesTestMethodNamingConvention(Method method) {
+        return method.getName().startsWith(TEST_METHOD_PREFIX) &&
+            Modifier.isStatic(method.getModifiers()) == false
+        ;
+    }
+
+    private boolean isAnnotated(Method method, Class<?> annotation) {
         for (Annotation presentAnnotation : method.getAnnotations()) {
             if (annotation.isAssignableFrom(presentAnnotation.getClass())) {
-                getLogger().info("{} is a test because {} is annotated with junit.framework.Test",
-                    clazz.getName(), method.getName()
-                );
                 return true;
             }
         }
@@ -380,14 +424,14 @@
 
     private Class<?> loadClassWithoutInitializing(String name, ClassLoader isolatedClassLoader) {
         try {
-            return Class.forName(name,
+            return Class.forName(
+                name,
                 // Don't initialize the class to save time. Not needed for this test and this doesn't share a VM with any other tests.
                 false,
                 isolatedClassLoader
             );
         } catch (ClassNotFoundException e) {
-            // Will not get here as the exception will be loaded by isolatedClassLoader
-            throw new RuntimeException("Failed to load class " + name, e);
+            throw new RuntimeException("Failed to load class " + name + ". Incorrect test runtime classpath?", e);
         }
     }
 
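For context on the initialize=false flag: Class.forName(name, false, loader) resolves a class without running its static initializers, so class structure can be inspected cheaply and without side effects. A fragment illustrating this (org.junit.Assert used as an arbitrary example class):

    // Sketch: the class is loaded but its <clinit> has not run, yet
    // reflection over its structure already works.
    Class<?> clazz = Class.forName("org.junit.Assert", false, isolatedClassLoader);
    System.out.println(clazz.getMethods().length);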
@@ -21,6 +21,7 @@ package org.elasticsearch.gradle.testfixtures;
 import com.avast.gradle.dockercompose.ComposeExtension;
 import com.avast.gradle.dockercompose.DockerComposePlugin;
 import org.elasticsearch.gradle.precommit.JarHellTask;
+import org.elasticsearch.gradle.precommit.TestingConventionsTasks;
 import org.elasticsearch.gradle.precommit.ThirdPartyAuditTask;
 import org.gradle.api.DefaultTask;
 import org.gradle.api.Plugin;
@@ -100,6 +101,10 @@ public class TestFixturesPlugin implements Plugin<Project> {
             tasks.withType(getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask"), task ->
                 task.setEnabled(false)
             );
+            // conventions are not honored when the tasks are disabled
+            tasks.withType(TestingConventionsTasks.class, task ->
+                task.setEnabled(false)
+            );
             return;
         }
         tasks.withType(getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask"), task ->
@@ -48,24 +48,17 @@
 <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]GcNames.java" checks="LineLength" />
 <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]HotThreads.java" checks="LineLength" />
 <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]Node.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]RepositoriesService.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]Repository.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]VerifyNodeRepositoryAction.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]blobstore[/\\]BlobStoreRepository.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]fs[/\\]FsRepository.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestIndicesAction.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestShardsAction.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestThreadPoolAction.java" checks="LineLength" />
 <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPool.java" checks="LineLength" />
 <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]aliases[/\\]IndexAliasesIT.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]explain[/\\]ExplainActionIT.java" checks="LineLength" />
 <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indexing[/\\]IndexActionIT.java" checks="LineLength" />
 <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]JvmGcMonitorServiceSettingsTests.java" checks="LineLength" />
-<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]BytesRestResponseTests.java" checks="LineLength" />
 <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]AliasRoutingIT.java" checks="LineLength" />
 <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]SimpleRoutingIT.java" checks="LineLength" />
 <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptServiceTests.java" checks="LineLength" />
 
 <!-- Temporarily contains extra-long lines as examples for tests to be written, see https://github.com/elastic/elasticsearch/issues/34829 -->
 <suppress files="modules[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ContextExampleTests.java" checks="LineLength" />
+
+<!-- Gradle requires inputs to be serializable -->
+<suppress files="buildSrc[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gradle[/\\]precommit[/\\]TestingConventionRule.java" checks="RegexpSinglelineJava" />
 </suppressions>
@@ -1 +1 @@
-5.0
+5.1
@@ -0,0 +1,108 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.precommit;
+
+import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
+import org.gradle.testkit.runner.BuildResult;
+import org.gradle.testkit.runner.GradleRunner;
+import org.junit.Before;
+
+public class TestingConventionsTasksIT extends GradleIntegrationTestCase {
+
+    @Before
+    public void setUp() {
+    }
+
+    public void testInnerClasses() {
+        GradleRunner runner = getGradleRunner("testingConventions")
+            .withArguments("clean", ":no_tests_in_inner_classes:testingConventions", "-i", "-s");
+        BuildResult result = runner.buildAndFail();
+        assertOutputContains(result.getOutput(),
+            "Test classes implemented by inner classes will not run:",
+            " * org.elasticsearch.gradle.testkit.NastyInnerClasses$LooksLikeATestWithoutNamingConvention1",
+            " * org.elasticsearch.gradle.testkit.NastyInnerClasses$LooksLikeATestWithoutNamingConvention2",
+            " * org.elasticsearch.gradle.testkit.NastyInnerClasses$LooksLikeATestWithoutNamingConvention3",
+            " * org.elasticsearch.gradle.testkit.NastyInnerClasses$NamingConventionIT",
+            " * org.elasticsearch.gradle.testkit.NastyInnerClasses$NamingConventionTests"
+        );
+    }
+
+    public void testNamingConvention() {
+        GradleRunner runner = getGradleRunner("testingConventions")
+            .withArguments("clean", ":incorrect_naming_conventions:testingConventions", "-i", "-s");
+        BuildResult result = runner.buildAndFail();
+        assertOutputContains(result.getOutput(),
+            "Seem like test classes but don't match naming convention:",
+            " * org.elasticsearch.gradle.testkit.LooksLikeATestWithoutNamingConvention1",
+            " * org.elasticsearch.gradle.testkit.LooksLikeATestWithoutNamingConvention2",
+            " * org.elasticsearch.gradle.testkit.LooksLikeATestWithoutNamingConvention3"
+        );
+        assertOutputDoesNotContain(result.getOutput(), "LooksLikeTestsButAbstract");
+    }
+
+    public void testNoEmptyTasks() {
+        GradleRunner runner = getGradleRunner("testingConventions")
+            .withArguments("clean", ":empty_test_task:testingConventions", "-i", "-s");
+        BuildResult result = runner.buildAndFail();
+        assertOutputContains(result.getOutput(),
+            "Expected at least one test class included in task :empty_test_task:emptyTest, but found none.",
+            "Expected at least one test class included in task :empty_test_task:emptyTestRandomized, but found none."
+        );
+    }
+
+    public void testAllTestTasksIncluded() {
+        GradleRunner runner = getGradleRunner("testingConventions")
+            .withArguments("clean", ":all_classes_in_tasks:testingConventions", "-i", "-s");
+        BuildResult result = runner.buildAndFail();
+        assertOutputContains(result.getOutput(),
+            "Test classes are not included in any enabled task (:all_classes_in_tasks:emptyTestRandomized):",
+            " * org.elasticsearch.gradle.testkit.NamingConventionIT",
+            " * org.elasticsearch.gradle.testkit.NamingConventionTests"
+        );
+    }
+
+    public void testTaskNotImplementBaseClass() {
+        GradleRunner runner = getGradleRunner("testingConventions")
+            .withArguments("clean", ":not_implementing_base:testingConventions", "-i", "-s");
+        BuildResult result = runner.buildAndFail();
+        assertOutputContains(result.getOutput(),
+            "Tests classes with suffix `IT` should extend org.elasticsearch.gradle.testkit.Integration but the following classes do not:",
+            " * org.elasticsearch.gradle.testkit.NamingConventionIT",
+            " * org.elasticsearch.gradle.testkit.NamingConventionMissmatchIT",
+            "Tests classes with suffix `Tests` should extend org.elasticsearch.gradle.testkit.Unit but the following classes do not:",
+            " * org.elasticsearch.gradle.testkit.NamingConventionMissmatchTests",
+            " * org.elasticsearch.gradle.testkit.NamingConventionTests"
+        );
+    }
+
+    public void testValidSetupWithoutBaseClass() {
+        GradleRunner runner = getGradleRunner("testingConventions")
+            .withArguments("clean", ":valid_setup_no_base:testingConventions", "-i", "-s");
+        BuildResult result = runner.build();
+        assertTaskSuccessful(result, ":valid_setup_no_base:testingConventions");
+    }
+
+    public void testValidSetupWithBaseClass() {
+        GradleRunner runner = getGradleRunner("testingConventions")
+            .withArguments("clean", ":valid_setup_with_base:testingConventions", "-i", "-s");
+        BuildResult result = runner.build();
+        assertTaskSuccessful(result, ":valid_setup_with_base:testingConventions");
+    }
+}
@@ -43,7 +43,7 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
         if (index.equals(index.stream().sorted().collect(Collectors.toList())) == false) {
             fail("Expected the following lines to appear in this order:\n" +
                 Stream.of(lines).map(line -> " - `" + line + "`").collect(Collectors.joining("\n")) +
-                "\nBut they did not. Output is:\n\n```" + output + "\n```\n"
+                "\nBut the order was different. Output is:\n\n```" + output + "\n```\n"
             );
         }
     }
@@ -21,9 +21,11 @@ package org.elasticsearch.gradle.testclusters;
 import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
 import org.gradle.testkit.runner.BuildResult;
 import org.gradle.testkit.runner.GradleRunner;
+import org.junit.Ignore;
 
 import java.util.Arrays;
 
+@Ignore // https://github.com/elastic/elasticsearch/issues/37218
 public class TestClustersPluginIT extends GradleIntegrationTestCase {
 
     public void testListClusters() {
@@ -0,0 +1,23 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+public class NamingConventionIT {
+
+}
@@ -0,0 +1,23 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+public class NamingConventionTests {
+
+}
buildSrc/src/testKit/testingConventions/build.gradle (new file, 86 lines)
@@ -0,0 +1,86 @@
+plugins {
+  id 'elasticsearch.build' apply false
+}
+
+allprojects {
+  apply plugin: 'java'
+  apply plugin: 'elasticsearch.build'
+
+  repositories {
+    jcenter()
+  }
+  dependencies {
+    testCompile "junit:junit:4.12"
+  }
+
+  ext.licenseFile = file("$buildDir/dummy/license")
+  ext.noticeFile = file("$buildDir/dummy/notice")
+
+  testingConventions.naming {
+    // Reset default to no baseClass checks
+    Tests {
+      baseClasses = []
+    }
+    IT {
+      baseClasses = []
+    }
+  }
+
+  unitTest.enabled = false
+}
+
+project(':empty_test_task') {
+  task emptyTest(type: Test) {
+
+  }
+
+  task emptyTestRandomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) {
+
+  }
+}
+
+project(':all_classes_in_tasks') {
+  task emptyTestRandomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) {
+    include "**/Convention*"
+  }
+}
+
+project(':not_implementing_base') {
+  testingConventions.naming {
+    Tests {
+      baseClass 'org.elasticsearch.gradle.testkit.Unit'
+    }
+    IT {
+      baseClass 'org.elasticsearch.gradle.testkit.Integration'
+    }
+  }
+  task randomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) {
+    include "**/*IT.class"
+    include "**/*Tests.class"
+  }
+}
+
+project(':valid_setup_no_base') {
+  task randomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) {
+    include "**/*IT.class"
+    include "**/*Tests.class"
+  }
+}
+
+project(':valid_setup_with_base') {
+  task randomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) {
+    include "**/*IT.class"
+    include "**/*Tests.class"
+  }
+  testingConventions.naming {
+    Tests {
+      baseClass 'org.elasticsearch.gradle.testkit.Unit'
+    }
+    IT {
+      baseClass 'org.elasticsearch.gradle.testkit.Integration'
+    }
+  }
+}
buildSrc/src/testKit/testingConventions/empty_test_task/.gitignore (new file, 0 lines, vendored)
@@ -0,0 +1,30 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+import org.junit.Test;
+
+public class LooksLikeATestWithoutNamingConvention1 {
+
+    @Test
+    public void annotatedTestMethod() {
+
+    }
+
+}
@@ -0,0 +1,25 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+import org.junit.Assert;
+
+public class LooksLikeATestWithoutNamingConvention2 extends Assert {
+
+}
@@ -0,0 +1,30 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class LooksLikeATestWithoutNamingConvention3 {
+
+    public void testMethod() {
+
+    }
+
+}
@@ -0,0 +1,30 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public abstract class LooksLikeTestsButAbstract {
+
+    public void testMethod() {
+
+    }
+
+}
@@ -0,0 +1,23 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+public class NamingConventionIT {
+
+}
@@ -0,0 +1,23 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+public class NamingConventionTests {
+
+}
@@ -0,0 +1,64 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class NastyInnerClasses {
+
+    public static class NamingConventionTests {
+
+    }
+
+    public static class NamingConventionIT {
+
+    }
+
+    public static class LooksLikeATestWithoutNamingConvention1 {
+        @Test
+        public void annotatedTestMethod() {
+
+        }
+    }
+
+    public static class LooksLikeATestWithoutNamingConvention2 extends Assert {
+
+    }
+
+    public static class LooksLikeATestWithoutNamingConvention3 {
+
+        public void testMethod() {
+
+        }
+
+    }
+
+    static abstract public class NonOffendingAbstractTests {
+
+    }
+
+    private static class NonOffendingPrivateTests {
+
+    }
+
+    static class NonOffendingPackageTests {
+
+    }
+}
@@ -0,0 +1,23 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+public abstract class AbstractIT {
+
+}
@@ -0,0 +1,23 @@
+/* Apache License 2.0 header, as on TestingConventionRule.java above */
+package org.elasticsearch.gradle.testkit;
+
+public class Integration {
+
+}
@ -0,0 +1,23 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.gradle.testkit;
|
||||||
|
|
||||||
|
public class NamingConventionIT {
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,23 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.gradle.testkit;
|
||||||
|
|
||||||
|
public class NamingConventionMissmatchIT extends Unit {
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,23 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.gradle.testkit;
|
||||||
|
|
||||||
|
public class NamingConventionMissmatchTests extends Integration {
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,23 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.gradle.testkit;
|
||||||
|
|
||||||
|
public class NamingConventionTests {
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,23 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.gradle.testkit;
|
||||||
|
|
||||||
|
public class Unit {
|
||||||
|
|
||||||
|
}
|
buildSrc/src/testKit/testingConventions/settings.gradle (new file, 7 lines)
@@ -0,0 +1,7 @@
+include 'no_tests_in_inner_classes'
+include 'incorrect_naming_conventions'
+include 'empty_test_task'
+include 'all_classes_in_tasks'
+include 'not_implementing_base'
+include 'valid_setup_no_base'
+include 'valid_setup_with_base'
@@ -0,0 +1,23 @@
+/* (Apache 2.0 license header, identical to the one above) */
+package org.elasticsearch.gradle.testkit;
+
+public class NamingConventionIT {
+
+}
@@ -0,0 +1,23 @@
+/* (Apache 2.0 license header, identical to the one above) */
+package org.elasticsearch.gradle.testkit;
+
+public class NamingConventionTests {
+
+}
@@ -0,0 +1,23 @@
+/* (Apache 2.0 license header, identical to the one above) */
+package org.elasticsearch.gradle.testkit;
+
+public class Integration {
+
+}
@@ -0,0 +1,23 @@
+/* (Apache 2.0 license header, identical to the one above) */
+package org.elasticsearch.gradle.testkit;
+
+public class NamingConventionIT extends Integration {
+
+}
@@ -0,0 +1,23 @@
+/* (Apache 2.0 license header, identical to the one above) */
+package org.elasticsearch.gradle.testkit;
+
+public class NamingConventionTests extends Unit {
+
+}
@@ -0,0 +1,23 @@
+/* (Apache 2.0 license header, identical to the one above) */
+package org.elasticsearch.gradle.testkit;
+
+public class Unit {
+
+}
@@ -24,7 +24,7 @@ import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.nio.entity.NByteArrayEntity;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.client.RequestConverters.EndpointBuilder;
 import org.elasticsearch.client.ml.CloseJobRequest;
@@ -462,7 +462,7 @@ final class MLRequestConverters {
         BytesReference content = postDataRequest.getContent();
         if (content != null) {
             BytesRef source = postDataRequest.getContent().toBytesRef();
-            HttpEntity byteEntity = new ByteArrayEntity(source.bytes,
+            HttpEntity byteEntity = new NByteArrayEntity(source.bytes,
                 source.offset,
                 source.length,
                 createContentType(postDataRequest.getXContentType()));
@@ -686,7 +686,7 @@ final class MLRequestConverters {
 
         BytesReference sample = findFileStructureRequest.getSample();
         BytesRef source = sample.toBytesRef();
-        HttpEntity byteEntity = new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(XContentType.JSON));
+        HttpEntity byteEntity = new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(XContentType.JSON));
         request.setEntity(byteEntity);
         return request;
     }
@@ -25,8 +25,8 @@ import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpHead;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.ByteArrayEntity;
 import org.apache.http.entity.ContentType;
+import org.apache.http.nio.entity.NByteArrayEntity;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.DocWriteRequest;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
@@ -239,7 +239,7 @@ final class RequestConverters {
                 content.write(separator);
             }
         }
-        request.setEntity(new ByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType));
+        request.setEntity(new NByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType));
         return request;
     }
 
@@ -322,7 +322,7 @@ final class RequestConverters {
 
         BytesRef source = indexRequest.source().toBytesRef();
         ContentType contentType = createContentType(indexRequest.getContentType());
-        request.setEntity(new ByteArrayEntity(source.bytes, source.offset, source.length, contentType));
+        request.setEntity(new NByteArrayEntity(source.bytes, source.offset, source.length, contentType));
         return request;
     }
 
@@ -431,7 +431,7 @@ final class RequestConverters {
 
         XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent();
         byte[] source = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, xContent);
-        request.setEntity(new ByteArrayEntity(source, createContentType(xContent.type())));
+        request.setEntity(new NByteArrayEntity(source, createContentType(xContent.type())));
         return request;
     }
 
@@ -464,7 +464,7 @@ final class RequestConverters {
 
         XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent();
         byte[] source = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, xContent);
-        request.setEntity(new ByteArrayEntity(source, createContentType(xContent.type())));
+        request.setEntity(new NByteArrayEntity(source, createContentType(xContent.type())));
         return request;
     }
 
@@ -694,7 +694,7 @@ final class RequestConverters {
     static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType, ToXContent.Params toXContentParams)
             throws IOException {
         BytesRef source = XContentHelper.toXContent(toXContent, xContentType, toXContentParams, false).toBytesRef();
-        return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
+        return new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
     }
 
     static String endpoint(String index, String type, String id) {
@@ -23,8 +23,8 @@ import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.ByteArrayEntity;
 import org.apache.http.entity.ContentType;
+import org.apache.http.nio.entity.NByteArrayEntity;
 import org.elasticsearch.client.watcher.AckWatchRequest;
 import org.elasticsearch.client.watcher.ActivateWatchRequest;
 import org.elasticsearch.client.watcher.DeactivateWatchRequest;
@@ -75,7 +75,7 @@ final class WatcherRequestConverters {
         }
         ContentType contentType = RequestConverters.createContentType(putWatchRequest.xContentType());
         BytesReference source = putWatchRequest.getSource();
-        request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType));
+        request.setEntity(new NByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType));
         return request;
     }
 
@@ -30,3 +30,7 @@ org.elasticsearch.common.logging.PrefixLogger
 
 @defaultMessage We can't rely on log4j2 being on the classpath so don't log deprecations!
 org.elasticsearch.common.xcontent.LoggingDeprecationHandler
+
+@defaultMessage Use Nonblocking org.apache.http.nio.entity.NByteArrayEntity
+org.apache.http.entity.ByteArrayEntity
+org.apache.http.entity.StringEntity
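The forbidden-apis entries above ban the blocking `ByteArrayEntity` and `StringEntity`, and the converter and test hunks in this commit swap in the non-blocking `org.apache.http.nio.entity` counterparts. A minimal sketch of the substitution, assuming a hypothetical helper class (the entity constructors are the same ones the hunks above already use):

[source,java]
--------------------------------------------------
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NByteArrayEntity;
import org.apache.http.nio.entity.NStringEntity;

final class NonBlockingEntities {

    // Was: new ByteArrayEntity(source, 0, source.length, contentType).
    // The nio entities also act as async content producers, so the
    // non-blocking client can stream the request body without tying
    // up an I/O dispatch thread.
    static HttpEntity jsonBody(byte[] source) {
        return new NByteArrayEntity(source, 0, source.length, ContentType.APPLICATION_JSON);
    }

    // Was: new StringEntity(json, ContentType.APPLICATION_JSON).
    static HttpEntity jsonBody(String json) {
        return new NStringEntity(json, ContentType.APPLICATION_JSON);
    }
}
--------------------------------------------------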
@@ -875,7 +875,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         // test1: create one doc in dest
         UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest();
         updateByQueryRequest.indices(sourceIndex);
-        updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1").types("_doc"));
+        updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1"));
         updateByQueryRequest.setRefresh(true);
         BulkByScrollResponse bulkResponse =
             execute(updateByQueryRequest, highLevelClient()::updateByQuery, highLevelClient()::updateByQueryAsync);
@@ -917,7 +917,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         // test update-by-query rethrottling
         UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest();
         updateByQueryRequest.indices(sourceIndex);
-        updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1").types("_doc"));
+        updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1"));
         updateByQueryRequest.setRefresh(true);
 
         // this following settings are supposed to halt reindexing after first document
@@ -987,7 +987,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         // test1: delete one doc
         DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
         deleteByQueryRequest.indices(sourceIndex);
-        deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1").types("_doc"));
+        deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1"));
         deleteByQueryRequest.setRefresh(true);
         BulkByScrollResponse bulkResponse =
             execute(deleteByQueryRequest, highLevelClient()::deleteByQuery, highLevelClient()::deleteByQueryAsync);
@@ -1009,7 +1009,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         // test delete-by-query rethrottling
         DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
         deleteByQueryRequest.indices(sourceIndex);
-        deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("2", "3").types("_doc"));
+        deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("2", "3"));
         deleteByQueryRequest.setRefresh(true);
 
         // this following settings are supposed to halt reindexing after first document
@@ -24,10 +24,10 @@ import org.apache.http.HttpHost;
 import org.apache.http.ProtocolVersion;
 import org.apache.http.RequestLine;
 import org.apache.http.client.methods.HttpGet;
-import org.apache.http.entity.ByteArrayEntity;
 import org.apache.http.entity.ContentType;
 import org.apache.http.message.BasicRequestLine;
 import org.apache.http.message.BasicStatusLine;
+import org.apache.http.nio.entity.NByteArrayEntity;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Build;
 import org.elasticsearch.Version;
@@ -166,7 +166,7 @@ public class CustomRestHighLevelClientTests extends ESTestCase {
 
         MainResponse response = new MainResponse(httpHeader.getValue(), Version.CURRENT, ClusterName.DEFAULT, "_na", Build.CURRENT);
         BytesRef bytesRef = XContentHelper.toXContent(response, XContentType.JSON, false).toBytesRef();
-        when(mockResponse.getEntity()).thenReturn(new ByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON));
+        when(mockResponse.getEntity()).thenReturn(new NByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON));
 
         RequestLine requestLine = new BasicRequestLine(HttpGet.METHOD_NAME, ENDPOINT, protocol);
         when(mockResponse.getRequestLine()).thenReturn(requestLine);
@@ -25,7 +25,7 @@ import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpHead;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.nio.entity.NByteArrayEntity;
 import org.apache.http.util.EntityUtils;
 import org.elasticsearch.action.DocWriteRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
@@ -671,7 +671,7 @@ public class RequestConvertersTests extends ESTestCase {
         assertEquals(method, request.getMethod());
 
         HttpEntity entity = request.getEntity();
-        assertTrue(entity instanceof ByteArrayEntity);
+        assertTrue(entity instanceof NByteArrayEntity);
         assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType().getValue());
         try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) {
             assertEquals(nbFields, parser.map().size());
@@ -714,7 +714,7 @@ public class RequestConvertersTests extends ESTestCase {
         assertEquals(method, request.getMethod());
 
         HttpEntity entity = request.getEntity();
-        assertTrue(entity instanceof ByteArrayEntity);
+        assertTrue(entity instanceof NByteArrayEntity);
         assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType().getValue());
         try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) {
             assertEquals(nbFields, parser.map().size());
@@ -787,7 +787,7 @@ public class RequestConvertersTests extends ESTestCase {
         assertEquals(HttpPost.METHOD_NAME, request.getMethod());
 
         HttpEntity entity = request.getEntity();
-        assertTrue(entity instanceof ByteArrayEntity);
+        assertTrue(entity instanceof NByteArrayEntity);
 
         UpdateRequest parsedUpdateRequest = new UpdateRequest();
 
@@ -21,7 +21,7 @@ package org.elasticsearch.client;
 
 import org.apache.http.HttpEntity;
 import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
+import org.apache.http.nio.entity.NStringEntity;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -51,14 +51,14 @@ public class RestHighLevelClientExtTests extends ESTestCase {
 
     public void testParseEntityCustomResponseSection() throws IOException {
         {
-            HttpEntity jsonEntity = new StringEntity("{\"custom1\":{ \"field\":\"value\"}}", ContentType.APPLICATION_JSON);
+            HttpEntity jsonEntity = new NStringEntity("{\"custom1\":{ \"field\":\"value\"}}", ContentType.APPLICATION_JSON);
             BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent);
             assertThat(customSection, instanceOf(CustomResponseSection1.class));
             CustomResponseSection1 customResponseSection1 = (CustomResponseSection1) customSection;
             assertEquals("value", customResponseSection1.value);
         }
         {
-            HttpEntity jsonEntity = new StringEntity("{\"custom2\":{ \"array\": [\"item1\", \"item2\"]}}", ContentType.APPLICATION_JSON);
+            HttpEntity jsonEntity = new NStringEntity("{\"custom2\":{ \"array\": [\"item1\", \"item2\"]}}", ContentType.APPLICATION_JSON);
             BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent);
             assertThat(customSection, instanceOf(CustomResponseSection2.class));
             CustomResponseSection2 customResponseSection2 = (CustomResponseSection2) customSection;
@@ -27,12 +27,11 @@ import org.apache.http.ProtocolVersion;
 import org.apache.http.RequestLine;
 import org.apache.http.StatusLine;
 import org.apache.http.client.methods.HttpGet;
-import org.apache.http.entity.ByteArrayEntity;
 import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
 import org.apache.http.message.BasicHttpResponse;
 import org.apache.http.message.BasicRequestLine;
 import org.apache.http.message.BasicStatusLine;
+import org.apache.http.nio.entity.NByteArrayEntity;
 import org.apache.http.nio.entity.NStringEntity;
 import org.elasticsearch.Build;
 import org.elasticsearch.ElasticsearchException;
@@ -243,11 +242,11 @@ public class RestHighLevelClientTests extends ESTestCase {
         }
         {
             IllegalStateException ise = expectThrows(IllegalStateException.class,
-                    () -> restHighLevelClient.parseEntity(new StringEntity("", (ContentType) null), null));
+                    () -> restHighLevelClient.parseEntity(new NStringEntity("", (ContentType) null), null));
             assertEquals("Elasticsearch didn't return the [Content-Type] header, unable to parse response body", ise.getMessage());
         }
         {
-            StringEntity entity = new StringEntity("", ContentType.APPLICATION_SVG_XML);
+            NStringEntity entity = new NStringEntity("", ContentType.APPLICATION_SVG_XML);
             IllegalStateException ise = expectThrows(IllegalStateException.class, () -> restHighLevelClient.parseEntity(entity, null));
             assertEquals("Unsupported Content-Type: " + entity.getContentType().getValue(), ise.getMessage());
         }
@@ -260,9 +259,9 @@ public class RestHighLevelClientTests extends ESTestCase {
             assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
             return value;
         };
-        HttpEntity jsonEntity = new StringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON);
+        HttpEntity jsonEntity = new NStringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON);
         assertEquals("value", restHighLevelClient.parseEntity(jsonEntity, entityParser));
-        HttpEntity yamlEntity = new StringEntity("---\nfield: value\n", ContentType.create("application/yaml"));
+        HttpEntity yamlEntity = new NStringEntity("---\nfield: value\n", ContentType.create("application/yaml"));
         assertEquals("value", restHighLevelClient.parseEntity(yamlEntity, entityParser));
         HttpEntity smileEntity = createBinaryEntity(SmileXContent.contentBuilder(), ContentType.create("application/smile"));
         assertEquals("value", restHighLevelClient.parseEntity(smileEntity, entityParser));
@@ -276,7 +275,7 @@ public class RestHighLevelClientTests extends ESTestCase {
             builder.startObject();
             builder.field("field", "value");
             builder.endObject();
-            return new ByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType);
+            return new NByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType);
         }
     }
 
@@ -302,7 +301,7 @@ public class RestHighLevelClientTests extends ESTestCase {
         {
             RestStatus restStatus = randomFrom(RestStatus.values());
             HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-            httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
+            httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
                     ContentType.APPLICATION_JSON));
             Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
             ResponseException responseException = new ResponseException(response);
@@ -314,7 +313,7 @@ public class RestHighLevelClientTests extends ESTestCase {
         {
             RestStatus restStatus = randomFrom(RestStatus.values());
             HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-            httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON));
+            httpResponse.setEntity(new NStringEntity("{\"error\":", ContentType.APPLICATION_JSON));
             Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
             ResponseException responseException = new ResponseException(response);
             ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException);
@@ -326,7 +325,7 @@ public class RestHighLevelClientTests extends ESTestCase {
         {
             RestStatus restStatus = randomFrom(RestStatus.values());
             HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-            httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
+            httpResponse.setEntity(new NStringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
             Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
             ResponseException responseException = new ResponseException(response);
             ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException);
@@ -378,7 +377,7 @@ public class RestHighLevelClientTests extends ESTestCase {
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
         RestStatus restStatus = randomFrom(RestStatus.values());
         HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
+        httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
                 ContentType.APPLICATION_JSON));
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
@@ -396,7 +395,7 @@ public class RestHighLevelClientTests extends ESTestCase {
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
         RestStatus restStatus = randomFrom(RestStatus.values());
         HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON));
+        httpResponse.setEntity(new NStringEntity("{\"error\":", ContentType.APPLICATION_JSON));
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
         when(restClient.performRequest(any(Request.class))).thenThrow(responseException);
@@ -414,7 +413,7 @@ public class RestHighLevelClientTests extends ESTestCase {
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
         RestStatus restStatus = randomFrom(RestStatus.values());
         HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
+        httpResponse.setEntity(new NStringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
         when(restClient.performRequest(any(Request.class))).thenThrow(responseException);
@@ -458,7 +457,7 @@ public class RestHighLevelClientTests extends ESTestCase {
         MainRequest mainRequest = new MainRequest();
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
         HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
-        httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}",
+        httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":404}",
                 ContentType.APPLICATION_JSON));
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
@@ -528,7 +527,7 @@ public class RestHighLevelClientTests extends ESTestCase {
                 response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet());
         RestStatus restStatus = randomFrom(RestStatus.values());
         HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
+        httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
                 ContentType.APPLICATION_JSON));
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
@@ -547,7 +546,7 @@ public class RestHighLevelClientTests extends ESTestCase {
                 response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet());
         RestStatus restStatus = randomFrom(RestStatus.values());
         HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON));
+        httpResponse.setEntity(new NStringEntity("{\"error\":", ContentType.APPLICATION_JSON));
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
         responseListener.onFailure(responseException);
@@ -564,7 +563,7 @@ public class RestHighLevelClientTests extends ESTestCase {
                 response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet());
         RestStatus restStatus = randomFrom(RestStatus.values());
         HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
+        httpResponse.setEntity(new NStringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
         responseListener.onFailure(responseException);
@@ -614,7 +613,7 @@ public class RestHighLevelClientTests extends ESTestCase {
         ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
                 response -> { throw new IllegalStateException(); }, trackingActionListener, Collections.singleton(404));
         HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
-        httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}",
+        httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":404}",
                 ContentType.APPLICATION_JSON));
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
@@ -207,11 +207,10 @@ public class QueryDSLDocumentationTests extends ESTestCase {
         // Using pre-indexed shapes
         GeoShapeQueryBuilder qb = geoShapeQuery(
                 "pin.location", // <1>
-                "DEU", // <2>
-                "countries"); // <3>
-        qb.relation(ShapeRelation.WITHIN) // <4>
-                .indexedShapeIndex("shapes") // <5>
-                .indexedShapePath("location"); // <6>
+                "DEU"); // <2>
+        qb.relation(ShapeRelation.WITHIN) // <3>
+                .indexedShapeIndex("shapes") // <4>
+                .indexedShapePath("location"); // <5>
         // end::indexed_geo_shape
     }
 }
@@ -236,9 +235,6 @@ public class QueryDSLDocumentationTests extends ESTestCase {
 
     public void testIds() {
         // tag::ids
-        idsQuery("my_type", "type2")
-                .addIds("1", "4", "100");
-
         idsQuery() // <1>
                 .addIds("1", "4", "100");
         // end::ids
@@ -77,6 +77,15 @@ namingConventions {
     skipIntegTestInDisguise = true
 }
 
+testingConventions {
+    naming.clear()
+    naming {
+        Tests {
+            baseClass 'org.elasticsearch.client.RestClientTestCase'
+        }
+    }
+}
+
 thirdPartyAudit.ignoreMissingClasses (
     //commons-logging optional dependencies
     'org.apache.avalon.framework.logger.Logger',
@@ -91,7 +91,7 @@ public class RestClientDocumentation {
     // end::rest-client-options-singleton
 
     @SuppressWarnings("unused")
-    public void testUsage() throws IOException, InterruptedException {
+    public void usage() throws IOException, InterruptedException {
 
         //tag::rest-client-init
         RestClient restClient = RestClient.builder(
@@ -291,7 +291,7 @@ public class RestClientDocumentation {
     }
 
     @SuppressWarnings("unused")
-    public void testCommonConfiguration() throws Exception {
+    public void commonConfiguration() throws Exception {
         {
             //tag::rest-client-config-timeouts
             RestClientBuilder builder = RestClient.builder(
@@ -78,6 +78,16 @@ namingConventions {
     skipIntegTestInDisguise = true
 }
 
+testingConventions {
+    naming.clear()
+    naming {
+        Tests {
+            baseClass 'org.elasticsearch.client.RestClientTestCase'
+        }
+    }
+}
+
+
 dependencyLicenses {
     dependencies = project.configurations.runtime.fileCollection {
         it.group.startsWith('org.elasticsearch') == false
@ -52,7 +52,7 @@ import java.util.concurrent.TimeUnit;
|
|||||||
public class SnifferDocumentation {
|
public class SnifferDocumentation {
|
||||||
|
|
||||||
@SuppressWarnings("unused")
|
@SuppressWarnings("unused")
|
||||||
public void testUsage() throws IOException {
|
public void usage() throws IOException {
|
||||||
{
|
{
|
||||||
//tag::sniffer-init
|
//tag::sniffer-init
|
||||||
RestClient restClient = RestClient.builder(
|
RestClient restClient = RestClient.builder(
|
||||||
@@ -52,3 +52,12 @@ namingConventions {
   //we don't have integration tests
   skipIntegTestInDisguise = true
 }
+
+testingConventions {
+  naming.clear()
+  naming {
+    Tests {
+      baseClass 'com.carrotsearch.randomizedtesting.RandomizedTest'
+    }
+  }
+}
@@ -38,6 +38,15 @@ namingConventions {
   skipIntegTestInDisguise = true
 }
 
+testingConventions {
+  naming.clear()
+  naming {
+    Tests {
+      baseClass 'org.elasticsearch.tools.launchers.LaunchersTestCase'
+    }
+  }
+}
+
 javadoc.enabled = false
 loggerUsageCheck.enabled = false
 jarHell.enabled = false
@@ -51,7 +51,6 @@ include-tagged::{query-dsl-test}[indexed_geo_shape]
 --------------------------------------------------
 <1> field
 <2> The ID of the document containing the pre-indexed shape.
-<3> Index type where the pre-indexed shape is.
-<4> relation
-<5> Name of the index where the pre-indexed shape is. Defaults to 'shapes'.
-<6> The field specified as path containing the pre-indexed shape. Defaults to 'shape'.
+<3> relation
+<4> Name of the index where the pre-indexed shape is. Defaults to 'shapes'.
+<5> The field specified as path containing the pre-indexed shape. Defaults to 'shape'.
@@ -8,4 +8,3 @@ See {ref}/query-dsl-ids-query.html[Ids Query]
 --------------------------------------------------
 include-tagged::{query-dsl-test}[ids]
 --------------------------------------------------
-<1> type is optional
@@ -31,3 +31,11 @@ instead.
 ==== `standard` filter has been removed
 
 The `standard` token filter has been removed because it doesn't change anything in the stream.
+
+[float]
+==== Deprecated standard_html_strip analyzer
+
+The `standard_html_strip` analyzer has been deprecated, and should be replaced
+with a combination of the `standard` tokenizer and `html_strip` char_filter.
+Indexes created using this analyzer will still be readable in elasticsearch 7.0,
+but it will not be possible to create new indexes using it.
@@ -1,10 +1,24 @@
 [[recovery]]
 === Indices Recovery
 
-The following _expert_ setting can be set to manage the recovery policy.
+<<cat-recovery,Peer recovery>> is the process used to build a new copy of a
+shard on a node by copying data from the primary. {es} uses this peer recovery
+process to rebuild shard copies that were lost if a node has failed, and uses
+the same process when migrating a shard copy between nodes to rebalance the
+cluster or to honor any changes to the <<modules-cluster,shard allocation
+settings>>.
+
+The following _expert_ setting can be set to manage the resources consumed by
+peer recoveries:
 
 `indices.recovery.max_bytes_per_sec`::
-    Defaults to `40mb`.
+    Limits the total inbound and outbound peer recovery traffic on each node.
+    Since this limit applies on each node, but there may be many nodes
+    performing peer recoveries concurrently, the total amount of peer recovery
+    traffic within a cluster may be much higher than this limit. If you set
+    this limit too high then there is a risk that ongoing peer recoveries will
+    consume an excess of bandwidth (or other resources) which could destabilize
+    the cluster. Defaults to `40mb`.
 
 This setting can be dynamically updated on a live cluster with the
-<<cluster-update-settings,cluster-update-settings>> API:
+<<cluster-update-settings,cluster-update-settings>> API.
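For reference, the same dynamic update issued from Java; a minimal sketch assuming an existing `RestHighLevelClient` named `client` (the `100mb` value is illustrative):

[source,java]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.settings.Settings;

class RecoveryThrottleExample {
    // Transient settings reset on a full cluster restart, which suits a
    // temporary change to the recovery throttle.
    static void raiseRecoveryThrottle(RestHighLevelClient client) throws IOException {
        ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
        request.transientSettings(Settings.builder()
                .put("indices.recovery.max_bytes_per_sec", "100mb")
                .build());
        client.cluster().putSettings(request, RequestOptions.DEFAULT);
    }
}
--------------------------------------------------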
@@ -81,7 +81,7 @@ GET /example/_search
 ==== Pre-Indexed Shape
 
 The Query also supports using a shape which has already been indexed in
-another index and/or index type. This is particularly useful for when
+another index. This is particularly useful for when
 you have a pre-defined list of shapes which are useful to your
 application and you want to reference this using a logical name (for
 example 'New Zealand') rather than having to provide their coordinates
@@ -90,7 +90,6 @@ each time. In this situation it is only necessary to provide:
 * `id` - The ID of the document containing the pre-indexed shape.
 * `index` - Name of the index where the pre-indexed shape is. Defaults
 to 'shapes'.
-* `type` - Index type where the pre-indexed shape is.
 * `path` - The field specified as path containing the pre-indexed shape.
 Defaults to 'shape'.
 * `routing` - The routing of the shape document if required.
@@ -130,7 +129,6 @@ GET /example/_search
         "location": {
             "indexed_shape": {
                 "index": "shapes",
-                "type": "_doc",
                 "id": "deu",
                 "path": "location"
             }
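The same typeless query built with the Java API this commit extends; a minimal sketch mirroring the JSON above:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

class PreIndexedShapeExample {
    // References the pre-indexed shape "deu" by id alone; no mapping type.
    static GeoShapeQueryBuilder germanyQuery() {
        return QueryBuilders.geoShapeQuery("location", "deu")
                .indexedShapeIndex("shapes")   // "shapes" is already the default
                .indexedShapePath("location")  // default would be "shape"
                .relation(ShapeRelation.WITHIN);
    }
}
--------------------------------------------------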
@@ -10,13 +10,9 @@ GET /_search
 {
     "query": {
         "ids" : {
-            "type" : "_doc",
             "values" : ["1", "4", "100"]
         }
     }
 }
 --------------------------------------------------
 // CONSOLE
-
-The `type` is optional and can be omitted, and can also accept an array
-of values. If no type is specified, all types defined in the index mapping are tried.
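The equivalent typeless construction through the Java builders, matching the rewritten snippet:

[source,java]
--------------------------------------------------
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

class IdsQueryExample {
    // Typeless form: documents are matched by id alone.
    static IdsQueryBuilder byIds() {
        return QueryBuilders.idsQuery().addIds("1", "4", "100");
    }
}
--------------------------------------------------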
@@ -42,12 +42,10 @@ GET /_search
     "like" : [
         {
             "_index" : "imdb",
-            "_type" : "movies",
             "_id" : "1"
         },
         {
             "_index" : "imdb",
-            "_type" : "movies",
             "_id" : "2"
         },
         "and potentially some more text here as well"
@@ -74,7 +72,6 @@ GET /_search
     "like" : [
         {
             "_index" : "marvel",
-            "_type" : "quotes",
             "doc" : {
                 "name": {
                     "first": "Ben",
@@ -85,7 +82,6 @@ GET /_search
         },
         {
             "_index" : "marvel",
-            "_type" : "quotes",
             "_id" : "2"
         }
     ],
@@ -36,9 +36,6 @@ The terms lookup mechanism supports the following options:
 `index`::
     The index to fetch the term values from.
 
-`type`::
-    The type to fetch the term values from.
-
 `id`::
     The id of the document to fetch the term values from.
 
@@ -93,7 +90,6 @@ GET /tweets/_search
         "terms" : {
             "user" : {
                 "index" : "users",
-                "type" : "_doc",
                 "id" : "2",
                 "path" : "followers"
             }
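The typeless lookup in Java, using the three-argument `TermsLookup` constructor that the tests later in this diff switch to:

[source,java]
--------------------------------------------------
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.TermsLookup;

class TermsLookupExample {
    // Fetches the "followers" array from document 2 of the "users" index and
    // matches documents whose "user" field contains any of those values.
    static QueryBuilder followersOfUser2() {
        return QueryBuilders.termsLookupQuery("user",
                new TermsLookup("users", "2", "followers"));
    }
}
--------------------------------------------------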
BIN gradle/wrapper/gradle-wrapper.jar vendored (binary file not shown)

gradle/wrapper/gradle-wrapper.properties vendored:
@@ -1,6 +1,6 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.0-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.1-all.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionSha256Sum=17847c8e12b2bcfce26a79f425f082c31d4ded822f99a66127eee2d96bf18216
+distributionSha256Sum=7b8a8b9cce0406733d2d3fab3874386c530657c73c3f4e9a3837d081e26060d8
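Gradle verifies the downloaded distribution against `distributionSha256Sum` before unpacking it; a rough sketch of the equivalent check (the local file path is hypothetical):

[source,java]
--------------------------------------------------
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;

public class WrapperChecksum {
    public static void main(String[] args) throws Exception {
        // Hypothetical local copy of the zip named in distributionUrl.
        byte[] zip = Files.readAllBytes(Paths.get("gradle-5.1-all.zip"));
        byte[] digest = MessageDigest.getInstance("SHA-256").digest(zip);
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        // Must equal the pinned distributionSha256Sum above.
        System.out.println(
            "7b8a8b9cce0406733d2d3fab3874386c530657c73c3f4e9a3837d081e26060d8"
                .equals(hex.toString()) ? "checksum OK" : "checksum MISMATCH");
    }
}
--------------------------------------------------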
@@ -66,3 +66,12 @@ jarHell.enabled = false
 namingConventions {
   testClass = 'junit.framework.TestCase'
 }
+
+testingConventions {
+  naming.clear()
+  naming {
+    Tests {
+      baseClass 'junit.framework.TestCase'
+    }
+  }
+}
@@ -171,6 +171,8 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
     public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
         Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> analyzers = new TreeMap<>();
         analyzers.put("fingerprint", FingerprintAnalyzerProvider::new);
+
+        // TODO remove in 8.0
         analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new);
         analyzers.put("pattern", PatternAnalyzerProvider::new);
         analyzers.put("snowball", SnowballAnalyzerProvider::new);
@@ -320,6 +322,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
     @Override
     public List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
         List<PreBuiltAnalyzerProviderFactory> analyzers = new ArrayList<>();
+        // TODO remove in 8.0
         analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.ELASTICSEARCH,
             () -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET)));
         analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH,
@@ -37,7 +37,10 @@ public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase {
     public StandardHtmlStripAnalyzer() {
         super(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET);
     }
+    /**
+     * @deprecated in 6.5, can not create in 7.0, and we remove this in 8.0
+     */
+    @Deprecated
     StandardHtmlStripAnalyzer(CharArraySet stopwords) {
         super(stopwords);
     }
@@ -19,7 +19,10 @@
 
 package org.elasticsearch.analysis.common;
 
+import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.analysis.CharArraySet;
+import org.elasticsearch.Version;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
@@ -28,14 +31,29 @@ import org.elasticsearch.index.analysis.Analysis;
 
 public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProvider<StandardHtmlStripAnalyzer> {
 
+    private static final DeprecationLogger DEPRECATION_LOGGER =
+            new DeprecationLogger(LogManager.getLogger(StandardHtmlStripAnalyzerProvider.class));
+
     private final StandardHtmlStripAnalyzer analyzer;
 
+    /**
+     * @deprecated in 6.5, can not create in 7.0, and we remove this in 8.0
+     */
+    @Deprecated
     StandardHtmlStripAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(indexSettings, name, settings);
         final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET;
         CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords);
         analyzer = new StandardHtmlStripAnalyzer(stopWords);
         analyzer.setVersion(version);
+        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
+            throw new IllegalArgumentException("[standard_html_strip] analyzer is not supported for new indices, " +
+                "use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter");
+        } else {
+            DEPRECATION_LOGGER.deprecatedAndMaybeLog("standard_html_strip_deprecation",
+                "Deprecated analyzer [standard_html_strip] used, " +
+                "replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter");
+        }
     }
 
     @Override
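For migration, the replacement named in the error message, expressed as index settings from Java; a minimal sketch (the analyzer name `my_html_analyzer` is hypothetical):

[source,java]
--------------------------------------------------
import org.elasticsearch.common.settings.Settings;

class HtmlStripReplacementExample {
    // Custom analyzer equivalent to the deprecated standard_html_strip:
    // standard tokenizer + html_strip char_filter + lowercase filter.
    static Settings analysisSettings() {
        return Settings.builder()
                .put("index.analysis.analyzer.my_html_analyzer.type", "custom")
                .put("index.analysis.analyzer.my_html_analyzer.tokenizer", "standard")
                .putList("index.analysis.analyzer.my_html_analyzer.char_filter", "html_strip")
                .putList("index.analysis.analyzer.my_html_analyzer.filter", "lowercase")
                .build();
    }
}
--------------------------------------------------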
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.analysis.common;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.Tokenizer;
 import org.elasticsearch.Version;
@@ -26,6 +27,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
@@ -116,4 +119,47 @@ public class CommonAnalysisPluginTests extends ESTestCase {
             assertNotNull(tokenFilterFactory.create(tokenizer));
         }
     }
+
+    /**
+     * Check that the deprecated analyzer name "standard_html_strip" throws exception for indices created since 7.0.0
+     */
+    public void testStandardHtmlStripAnalyzerDeprecationError() throws IOException {
+        Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+                .put(IndexMetaData.SETTING_VERSION_CREATED,
+                        VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
+                .put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip")
+                .putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b")
+                .build();
+
+        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
+        CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin();
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
+                () -> createTestAnalysis(idxSettings, settings, commonAnalysisPlugin));
+        assertEquals("[standard_html_strip] analyzer is not supported for new indices, " +
+                "use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter", ex.getMessage());
+    }
+
+    /**
+     * Check that the deprecated analyzer name "standard_html_strip" issues a deprecation warning for indices created since 6.5.0 until 7
+     */
+    public void testStandardHtmlStripAnalyzerDeprecationWarning() throws IOException {
+        Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+                .put(IndexMetaData.SETTING_VERSION_CREATED,
+                        VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
+                                VersionUtils.getPreviousVersion(Version.V_7_0_0)))
+                .put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip")
+                .putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b")
+                .build();
+
+        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
+        try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
+            IndexAnalyzers analyzers = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).indexAnalyzers;
+            Analyzer analyzer = analyzers.get("custom_analyzer");
+            assertNotNull(((NamedAnalyzer) analyzer).analyzer());
+            assertWarnings(
+                    "Deprecated analyzer [standard_html_strip] used, " +
+                    "replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter");
+        }
+    }
 }
@@ -69,14 +69,15 @@
 
 ---
 "standard_html_strip":
+    - skip:
+        version: " - 6.99.99"
+        reason: only starting from version 7.x this throws an error
     - do:
+        catch: /\[standard_html_strip\] analyzer is not supported for new indices, use a custom analyzer using \[standard\] tokenizer and \[html_strip\] char_filter, plus \[lowercase\] filter/
         indices.analyze:
           body:
             text: <bold/> <italic/>
            analyzer: standard_html_strip
-    - length: { tokens: 2 }
-    - match: { tokens.0.token: bold }
-    - match: { tokens.1.token: italic }
 
 ---
 "pattern":
@@ -537,7 +537,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
     public void testQueryWithRewrite() throws Exception {
         addQueryFieldMappings();
         client().prepareIndex("remote", "doc", "1").setSource("field", "value").get();
-        QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "doc", "1", "field"));
+        QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "1", "field"));
         ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1",
             BytesReference.bytes(XContentFactory
                 .jsonBuilder()
@@ -42,3 +42,9 @@ integTestCluster {
 integTestRunner {
   systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }"
 }
+
+testingConventions.naming {
+  IT {
+    baseClass 'org.elasticsearch.test.ESTestCase'
+  }
+}
@@ -40,3 +40,13 @@ singleNodeIntegTestCluster {
 integTestCluster.dependsOn(singleNodeIntegTestRunner, 'singleNodeIntegTestCluster#stop')
 
 check.dependsOn(integTest)
+
+
+testingConventions {
+  naming.clear()
+  naming {
+    IT {
+      baseClass 'org.elasticsearch.smoketest.ESSmokeClientTestCase'
+    }
+  }
+}
@@ -27,3 +27,11 @@ dependencies {
   testCompile project(path: ':modules:lang-painless', configuration: 'runtime')
   testCompile project(path: ':modules:reindex', configuration: 'runtime')
 }
+
+testingConventions {
+  naming {
+    IT {
+      baseClass 'org.elasticsearch.ingest.AbstractScriptTestCase'
+    }
+  }
+}
@@ -217,3 +217,14 @@ dependencyLicenses.enabled = false
 dependenciesInfo.enabled = false
 
 thirdPartyAudit.enabled = false
+
+
+testingConventions {
+  naming.clear()
+  // We only have one "special" integration test here to connect to wildfly
+  naming {
+    IT {
+      baseClass 'org.apache.lucene.util.LuceneTestCase'
+    }
+  }
+}
@@ -48,7 +48,7 @@
       search:
         rest_total_hits_as_int: true
         index: test_index
-        body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u1", "path" : "followers"}}}}
+        body: {"query" : {"terms" : {"user" : {"index" : "test_index", "id" : "u1", "path" : "followers"}}}}
   - match: { hits.total: 2 }
 
   - do:
@@ -56,4 +56,4 @@
       search:
         rest_total_hits_as_int: true
         index: test_index
-        body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u2", "path" : "followers"}}}}
+        body: {"query" : {"terms" : {"user" : {"index" : "test_index", "id" : "u2", "path" : "followers"}}}}
@@ -0,0 +1,59 @@
+---
+"Terms Query with No.of terms exceeding index.max_terms_count should FAIL":
+  - skip:
+      version: " - 6.99.99"
+      reason: index.max_terms_count setting has been added in 7.0.0
+  - do:
+      indices.create:
+          index: test_index
+          body:
+              settings:
+                  number_of_shards: 1
+                  index.max_terms_count: 2
+              mappings:
+                  test_type:
+                      properties:
+                          user:
+                              type: keyword
+                          followers:
+                              type: keyword
+  - do:
+      bulk:
+          refresh: true
+          body:
+              - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u1"}}'
+              - '{"user": "u1", "followers": ["u2", "u3"]}'
+              - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u2"}}'
+              - '{"user": "u2", "followers": ["u1", "u3", "u4"]}'
+              - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u3"}}'
+              - '{"user": "u3", "followers": ["u1"]}'
+              - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u4"}}'
+              - '{"user": "u4", "followers": ["u3"]}'
+
+  - do:
+      search:
+          rest_total_hits_as_int: true
+          index: test_index
+          body: {"query" : {"terms" : {"user" : ["u1", "u2"]}}}
+  - match: { hits.total: 2 }
+
+  - do:
+      catch: bad_request
+      search:
+          rest_total_hits_as_int: true
+          index: test_index
+          body: {"query" : {"terms" : {"user" : ["u1", "u2", "u3"]}}}
+
+  - do:
+      search:
+          rest_total_hits_as_int: true
+          index: test_index
+          body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u1", "path" : "followers"}}}}
+  - match: { hits.total: 2 }
+
+  - do:
+      catch: bad_request
+      search:
+          rest_total_hits_as_int: true
+          index: test_index
+          body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u2", "path" : "followers"}}}}
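The `index.max_terms_count` guard exercised above is an index-creation setting; a hedged Java sketch mirroring the test's index (name and limit taken from the test):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.common.settings.Settings;

class MaxTermsCountExample {
    // Any terms query against this index, direct or lookup-based, may name
    // at most two terms; larger queries fail with a bad_request.
    static CreateIndexRequest smallTermsIndex() {
        return new CreateIndexRequest("test_index").settings(Settings.builder()
                .put("index.number_of_shards", 1)
                .put("index.max_terms_count", 2));
    }
}
--------------------------------------------------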
@@ -161,6 +161,19 @@ forbiddenPatterns {
   exclude '**/*.st'
 }
 
+testingConventions {
+  naming.clear()
+  naming {
+    Tests {
+      baseClass "org.apache.lucene.util.LuceneTestCase"
+    }
+    IT {
+      baseClass "org.elasticsearch.test.ESIntegTestCase"
+      baseClass "org.elasticsearch.test.ESSingleNodeTestCase"
+    }
+  }
+}
+
 task generateModulesList {
   List<String> modules = project(':modules').subprojects.collect { it.name }
   modules.add('x-pack')
@@ -20,6 +20,7 @@ package org.elasticsearch.index.analysis;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.core.WhitespaceTokenizer;
+import org.elasticsearch.Version;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -130,7 +131,13 @@ public final class AnalysisRegistry implements Closeable {
                     throw new ElasticsearchException("failed to load analyzer for name " + key, ex);
                 }}
             );
+        } else if ("standard_html_strip".equals(analyzer)) {
+            if (Version.CURRENT.onOrAfter(Version.V_7_0_0)) {
+                throw new IllegalArgumentException("[standard_html_strip] analyzer is not supported for new indices, " +
+                    "use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter");
+            }
         }
 
         return analyzerProvider.get(environment, analyzer).get();
     }
 
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.query;
 
+import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.document.LatLonShape;
 import org.apache.lucene.geo.Line;
 import org.apache.lucene.geo.Polygon;
@@ -38,6 +39,7 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -48,6 +50,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
 import org.elasticsearch.common.geo.parsers.ShapeParser;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -66,6 +69,10 @@ import java.util.function.Supplier;
  */
 public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuilder> {
     public static final String NAME = "geo_shape";
+    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(
+        LogManager.getLogger(GeoShapeQueryBuilder.class));
+    static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [geo_shape] queries. " +
+        "The type should no longer be specified in the [indexed_shape] section.";
+
     public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes";
     public static final String DEFAULT_SHAPE_FIELD_NAME = "shape";
@@ -119,6 +126,19 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
         this(fieldName, shape, null, null);
     }
 
+    /**
+     * Creates a new GeoShapeQueryBuilder whose Query will be against the given
+     * field name and will use the Shape found with the given ID
+     *
+     * @param fieldName
+     *            Name of the field that will be filtered
+     * @param indexedShapeId
+     *            ID of the indexed Shape that will be used in the Query
+     */
+    public GeoShapeQueryBuilder(String fieldName, String indexedShapeId) {
+        this(fieldName, (ShapeBuilder) null, indexedShapeId, null);
+    }
+
     /**
      * Creates a new GeoShapeQueryBuilder whose Query will be against the given
      * field name and will use the Shape found with the given ID in the given
@@ -130,20 +150,19 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
      *            ID of the indexed Shape that will be used in the Query
      * @param indexedShapeType
      *            Index type of the indexed Shapes
+     * @deprecated use {@link #GeoShapeQueryBuilder(String, String)} instead
      */
+    @Deprecated
     public GeoShapeQueryBuilder(String fieldName, String indexedShapeId, String indexedShapeType) {
         this(fieldName, (ShapeBuilder) null, indexedShapeId, indexedShapeType);
     }
 
-    private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexedShapeId, String indexedShapeType) {
+    private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexedShapeId, @Nullable String indexedShapeType) {
         if (fieldName == null) {
             throw new IllegalArgumentException("fieldName is required");
         }
         if (shape == null && indexedShapeId == null) {
-            throw new IllegalArgumentException("either shapeBytes or indexedShapeId and indexedShapeType are required");
-        }
-        if (indexedShapeId != null && indexedShapeType == null) {
-            throw new IllegalArgumentException("indexedShapeType is required if indexedShapeId is specified");
+            throw new IllegalArgumentException("either shape or indexedShapeId is required");
         }
         this.fieldName = fieldName;
         this.shape = shape;
@@ -152,7 +171,8 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
         this.supplier = null;
     }
 
-    private GeoShapeQueryBuilder(String fieldName, Supplier<ShapeBuilder> supplier, String indexedShapeId, String indexedShapeType) {
+    private GeoShapeQueryBuilder(String fieldName, Supplier<ShapeBuilder> supplier, String indexedShapeId,
+                                 @Nullable String indexedShapeType) {
         this.fieldName = fieldName;
         this.shape = null;
         this.supplier = supplier;
@@ -238,7 +258,10 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
     /**
      * @return the document type of the indexed Shape that will be used in the
      *         Query
+     *
+     * @deprecated Types are in the process of being removed.
      */
+    @Deprecated
     public String indexedShapeType() {
         return indexedShapeType;
     }
@@ -566,8 +589,10 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
             shape.toXContent(builder, params);
         } else {
             builder.startObject(INDEXED_SHAPE_FIELD.getPreferredName())
-                .field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId)
-                .field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType);
+                .field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId);
+            if (indexedShapeType != null) {
+                builder.field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType);
+            }
             if (indexedShapeIndex != null) {
                 builder.field(SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex);
             }
@@ -677,6 +702,11 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
             }
         }
         GeoShapeQueryBuilder builder;
+        if (type != null) {
+            deprecationLogger.deprecatedAndMaybeLog(
+                "geo_share_query_with_types", TYPES_DEPRECATION_MESSAGE);
+        }
+
         if (shape != null) {
             builder = new GeoShapeQueryBuilder(fieldName, shape);
         } else {
@@ -739,7 +769,12 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
         } else if (this.shape == null) {
             SetOnce<ShapeBuilder> supplier = new SetOnce<>();
             queryRewriteContext.registerAsyncAction((client, listener) -> {
-                GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId);
+                GetRequest getRequest;
+                if (indexedShapeType == null) {
+                    getRequest = new GetRequest(indexedShapeIndex, indexedShapeId);
+                } else {
+                    getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId);
+                }
                 getRequest.routing(indexedShapeRouting);
                 fetch(client, getRequest, indexedShapePath, ActionListener.wrap(builder-> {
                     supplier.set(builder);
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.query;
 
+import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -27,6 +28,7 @@ import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -51,6 +53,9 @@ import static org.elasticsearch.common.xcontent.ObjectParser.fromList;
  */
 public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
     public static final String NAME = "ids";
+    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(
+        LogManager.getLogger(IdsQueryBuilder.class));
+    static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [ids] queries.";
 
     private static final ParseField TYPE_FIELD = new ParseField("type");
     private static final ParseField VALUES_FIELD = new ParseField("values");
@@ -83,8 +88,10 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
 
     /**
      * Add types to query
+     *
+     * @deprecated Types are in the process of being removed, prefer to filter on a field instead.
      */
-    // TODO: Remove
+    @Deprecated
     public IdsQueryBuilder types(String... types) {
         if (types == null) {
             throw new IllegalArgumentException("[" + NAME + "] types cannot be null");
@@ -95,7 +102,10 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
 
     /**
      * Returns the types used in this query
+     *
+     * @deprecated Types are in the process of being removed, prefer to filter on a field instead.
      */
+    @Deprecated
     public String[] types() {
         return this.types;
     }
@@ -121,7 +131,9 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
     @Override
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(NAME);
-        builder.array(TYPE_FIELD.getPreferredName(), types);
+        if (types.length > 0) {
+            builder.array(TYPE_FIELD.getPreferredName(), types);
+        }
         builder.startArray(VALUES_FIELD.getPreferredName());
         for (String value : ids) {
             builder.value(value);
@@ -142,7 +154,11 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
 
     public static IdsQueryBuilder fromXContent(XContentParser parser) {
         try {
-            return PARSER.apply(parser, null);
+            IdsQueryBuilder builder = PARSER.apply(parser, null);
+            if (builder.types().length > 0) {
+                deprecationLogger.deprecatedAndMaybeLog("ids_query_with_types", TYPES_DEPRECATION_MESSAGE);
+            }
+            return builder;
         } catch (IllegalArgumentException e) {
             throw new ParsingException(parser.getTokenLocation(), e.getMessage(), e);
         }
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.query;
 
+import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.search.BooleanClause;
@@ -41,6 +42,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
 import org.elasticsearch.common.lucene.search.XMoreLikeThis;
 import org.elasticsearch.common.lucene.uid.Versions;
@@ -53,6 +55,7 @@ import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
 
 import java.io.IOException;
@@ -66,6 +69,7 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
+import java.util.stream.Stream;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 
@@ -76,6 +80,11 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
  */
 public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQueryBuilder> {
     public static final String NAME = "more_like_this";
+    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(
+        LogManager.getLogger(MoreLikeThisQueryBuilder.class));
+    static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [more_like_this] " +
+        "queries. The type should no longer be specified in the [like] and [unlike] sections.";
+
 
     public static final int DEFAULT_MAX_QUERY_TERMS = XMoreLikeThis.DEFAULT_MAX_QUERY_TERMS;
     public static final int DEFAULT_MIN_TERM_FREQ = XMoreLikeThis.DEFAULT_MIN_TERM_FREQ;
@@ -182,9 +191,41 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
          * Constructor for a given item / document request
          *
          * @param index the index where the document is located
-         * @param type the type of the document
          * @param id and its id
          */
+        public Item(@Nullable String index, String id) {
+            if (id == null) {
+                throw new IllegalArgumentException("Item requires id to be non-null");
+            }
+            this.index = index;
+            this.id = id;
+        }
+
+        /**
+         * Constructor for an artificial document request, that is not present in the index.
+         *
+         * @param index the index to be used for parsing the doc
+         * @param doc the document specification
+         */
+        public Item(@Nullable String index, XContentBuilder doc) {
+            if (doc == null) {
+                throw new IllegalArgumentException("Item requires doc to be non-null");
+            }
+            this.index = index;
+            this.doc = BytesReference.bytes(doc);
+            this.xContentType = doc.contentType();
+        }
+
+        /**
+         * Constructor for a given item / document request
+         *
+         * @param index the index where the document is located
+         * @param type the type of the document
+         * @param id and its id
+         *
+         * @deprecated Types are in the process of being removed, use {@link Item(String, String)} instead.
+         */
+        @Deprecated
         public Item(@Nullable String index, @Nullable String type, String id) {
             if (id == null) {
                 throw new IllegalArgumentException("Item requires id to be non-null");
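Usage of the new typeless `Item` constructors, matching the more-like-this docs rewritten earlier in this diff:

[source,java]
--------------------------------------------------
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;

class TypelessItemExample {
    // Reference indexed documents by index and id only; no mapping type.
    static MoreLikeThisQueryBuilder likeMovies() {
        return new MoreLikeThisQueryBuilder(
                new String[] { "title", "description" },
                null,
                new Item[] { new Item("imdb", "1"), new Item("imdb", "2") });
    }
}
--------------------------------------------------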
@@ -200,7 +241,10 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
          * @param index the index to be used for parsing the doc
          * @param type the type to be used for parsing the doc
          * @param doc the document specification
+         *
+         * @deprecated Types are in the process of being removed, use {@link Item(String, XContentBuilder)} instead.
          */
+        @Deprecated
         public Item(@Nullable String index, @Nullable String type, XContentBuilder doc) {
             if (doc == null) {
                 throw new IllegalArgumentException("Item requires doc to be non-null");
@@ -257,10 +301,18 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
             return this;
         }
 
+        /**
+         * @deprecated Types are in the process of being removed.
+         */
+        @Deprecated
         public String type() {
             return type;
         }
 
+        /**
+         * @deprecated Types are in the process of being removed.
+         */
+        @Deprecated
         public Item type(String type) {
             this.type = type;
             return this;
@@ -912,9 +964,18 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
         if (stopWords != null) {
             moreLikeThisQueryBuilder.stopWords(stopWords);
         }
+
+        if (moreLikeThisQueryBuilder.isTypeless() == false) {
+            deprecationLogger.deprecatedAndMaybeLog("more_like_this_query_with_types", TYPES_DEPRECATION_MESSAGE);
+        }
         return moreLikeThisQueryBuilder;
     }
 
+    public boolean isTypeless() {
+        return Stream.concat(Arrays.stream(likeItems), Arrays.stream(unlikeItems))
+            .allMatch(item -> item.type == null);
+    }
+
     private static void parseLikeField(XContentParser parser, List<String> texts, List<Item> items) throws IOException {
         if (parser.currentToken().isValue()) {
             texts.add(parser.text());
@@ -1065,12 +1126,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
             item.index(context.index().getName());
         }
         if (item.type() == null) {
-            if (context.queryTypes().size() > 1) {
-                throw new QueryShardException(context,
-                    "ambiguous type for item with id: " + item.id() + " and index: " + item.index());
-            } else {
-                item.type(context.queryTypes().iterator().next());
-            }
+            item.type(MapperService.SINGLE_MAPPING_NAME);
         }
         // default fields if not present but don't override for artificial docs
         if ((item.fields() == null || item.fields().length == 0) && item.doc() == null) {
@@ -121,7 +121,10 @@ public final class QueryBuilders {
      * Constructs a query that will match only specific ids within types.
      *
      * @param types The mapping/doc type
+     *
+     * @deprecated Types are in the process of being removed, use {@link #idsQuery()} instead.
      */
+    @Deprecated
     public static IdsQueryBuilder idsQuery(String... types) {
         return new IdsQueryBuilder().types(types);
     }
@@ -646,6 +649,14 @@ public final class QueryBuilders {
         return new GeoShapeQueryBuilder(name, shape);
     }
 
+    public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShapeId) {
+        return new GeoShapeQueryBuilder(name, indexedShapeId);
+    }
+
+    /**
+     * @deprecated Types are in the process of being removed, use {@link #geoShapeQuery(String, String)} instead.
+     */
+    @Deprecated
     public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShapeId, String indexedShapeType) {
         return new GeoShapeQueryBuilder(name, indexedShapeId, indexedShapeType);
     }
@@ -662,6 +673,16 @@ public final class QueryBuilders {
         return builder;
     }
 
+    public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String indexedShapeId) {
+        GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId);
+        builder.relation(ShapeRelation.INTERSECTS);
+        return builder;
+    }
+
+    /**
+     * @deprecated Types are in the process of being removed, use {@link #geoIntersectionQuery(String, String)} instead.
+     */
+    @Deprecated
     public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String indexedShapeId, String indexedShapeType) {
         GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType);
         builder.relation(ShapeRelation.INTERSECTS);
@@ -680,6 +701,16 @@ public final class QueryBuilders {
         return builder;
     }
 
+    public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedShapeId) {
+        GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId);
+        builder.relation(ShapeRelation.WITHIN);
+        return builder;
+    }
+
+    /**
+     * @deprecated Types are in the process of being removed, use {@link #geoWithinQuery(String, String)} instead.
+     */
+    @Deprecated
     public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedShapeId, String indexedShapeType) {
         GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType);
         builder.relation(ShapeRelation.WITHIN);
@@ -698,6 +729,16 @@ public final class QueryBuilders {
         return builder;
     }
 
+    public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedShapeId) {
+        GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId);
+        builder.relation(ShapeRelation.DISJOINT);
+        return builder;
+    }
+
+    /**
+     * @deprecated Types are in the process of being removed, use {@link #geoDisjointQuery(String, String)} instead.
+     */
+    @Deprecated
     public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedShapeId, String indexedShapeType) {
         GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType);
         builder.relation(ShapeRelation.DISJOINT);
|
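All four geo helpers gain a typeless overload with the same shape as their deprecated three-argument counterparts. A minimal sketch of the new entry point, using the overload added above (the field name and shape id are illustrative):

    import org.elasticsearch.index.query.GeoShapeQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    // Look up a pre-indexed shape by id only; no mapping type is supplied.
    GeoShapeQueryBuilder query = QueryBuilders.geoShapeQuery("location", "shape-1");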
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.query;
 
+import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.util.BytesRef;
@@ -34,6 +35,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -63,6 +65,11 @@ import java.util.stream.IntStream;
 public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
     public static final String NAME = "terms";
 
+    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(
+        LogManager.getLogger(TermsQueryBuilder.class));
+    static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated " +
+        "in [terms] lookup queries.";
+
     private final String fieldName;
     private final List<?> values;
     private final TermsLookup termsLookup;
@@ -211,6 +218,10 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
         return this.termsLookup;
     }
 
+    public boolean isTypeless() {
+        return termsLookup == null || termsLookup.type() == null;
+    }
+
     private static final Set<Class<? extends Number>> INTEGER_TYPES = new HashSet<>(
         Arrays.asList(Byte.class, Short.class, Integer.class, Long.class));
     private static final Set<Class<?>> STRING_TYPES = new HashSet<>(
@@ -391,9 +402,16 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
             throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] query requires a field name, " +
                 "followed by array of terms or a document lookup specification");
         }
-        return new TermsQueryBuilder(fieldName, values, termsLookup)
-            .boost(boost)
-            .queryName(queryName);
+        TermsQueryBuilder builder = new TermsQueryBuilder(fieldName, values, termsLookup)
+            .boost(boost)
+            .queryName(queryName);
+
+        if (builder.isTypeless() == false) {
+            deprecationLogger.deprecatedAndMaybeLog("terms_lookup_with_types", TYPES_DEPRECATION_MESSAGE);
+        }
+
+        return builder;
     }
 
     static List<Object> parseValues(XContentParser parser) throws IOException {
@@ -442,8 +460,10 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
     }
 
     private void fetch(TermsLookup termsLookup, Client client, ActionListener<List<Object>> actionListener) {
-        GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id())
-            .preference("_local").routing(termsLookup.routing());
+        GetRequest getRequest = termsLookup.type() == null
+            ? new GetRequest(termsLookup.index(), termsLookup.id())
+            : new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id());
+        getRequest.preference("_local").routing(termsLookup.routing());
         client.get(getRequest, new ActionListener<GetResponse>() {
             @Override
             public void onResponse(GetResponse getResponse) {
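Taken together, the TermsQueryBuilder changes make the mapping type optional in terms lookups. A sketch of both construction paths, using only the constructors visible in this commit (the index, id and path values are illustrative):

    import org.elasticsearch.indices.TermsLookup;

    // Typeless lookup: the new TermsLookup(index, id, path) constructor, so
    // TermsQueryBuilder#isTypeless() is true and no deprecation warning is logged.
    TermsLookup lookup = new TermsLookup("users", "1", "followers");

    // Typed lookup: still parses, but now triggers the "terms_lookup_with_types"
    // deprecation warning emitted in fromXContent above.
    TermsLookup typed = new TermsLookup("users", "_doc", "1", "followers");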
@@ -20,11 +20,11 @@
 package org.elasticsearch.indices;
 
 import org.elasticsearch.Version;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -38,18 +38,24 @@ import java.util.Objects;
  */
 public class TermsLookup implements Writeable, ToXContentFragment {
     private final String index;
-    private final String type;
+    private @Nullable String type;
     private final String id;
     private final String path;
     private String routing;
 
+    public TermsLookup(String index, String id, String path) {
+        this(index, null, id, path);
+    }
+
+    /**
+     * @deprecated Types are in the process of being removed, use {@link TermsLookup(String, String, String)} instead.
+     */
+    @Deprecated
     public TermsLookup(String index, String type, String id, String path) {
         if (id == null) {
             throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the id.");
         }
-        if (type == null) {
-            throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the type.");
-        }
         if (path == null) {
             throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the path.");
         }
@@ -66,7 +72,12 @@ public class TermsLookup implements Writeable, ToXContentFragment {
      * Read from a stream.
      */
     public TermsLookup(StreamInput in) throws IOException {
-        type = in.readString();
+        if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
+            type = in.readOptionalString();
+        } else {
+            // Before 7.0, the type parameter was always non-null and serialized as a (non-optional) string.
+            type = in.readString();
+        }
         id = in.readString();
         path = in.readString();
         if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
@@ -82,7 +93,16 @@ public class TermsLookup implements Writeable, ToXContentFragment {
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(type);
+        if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+            out.writeOptionalString(type);
+        } else {
+            if (type == null) {
+                throw new IllegalArgumentException("Typeless [terms] lookup queries are not supported if any " +
+                    "node is running a version before 7.0.");
+            }
+            out.writeString(type);
+        }
         out.writeString(id);
         out.writeString(path);
         if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
@@ -97,6 +117,10 @@ public class TermsLookup implements Writeable, ToXContentFragment {
         return index;
     }
 
+    /**
+     * @deprecated Types are in the process of being removed.
+     */
+    @Deprecated
     public String type() {
         return type;
     }
@@ -155,18 +179,28 @@ public class TermsLookup implements Writeable, ToXContentFragment {
                     + token + "] after [" + currentFieldName + "]");
             }
         }
-        return new TermsLookup(index, type, id, path).routing(routing);
+        if (type == null) {
+            return new TermsLookup(index, id, path).routing(routing);
+        } else {
+            return new TermsLookup(index, type, id, path).routing(routing);
+        }
     }
 
     @Override
     public String toString() {
-        return index + "/" + type + "/" + id + "/" + path;
+        if (type == null) {
+            return index + "/" + id + "/" + path;
+        } else {
+            return index + "/" + type + "/" + id + "/" + path;
+        }
     }
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.field("index", index);
-        builder.field("type", type);
+        if (type != null) {
+            builder.field("type", type);
+        }
         builder.field("id", id);
         builder.field("path", path);
         if (routing != null) {
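The TermsLookup stream changes above follow the usual wire-compatibility idiom: the newly optional field is gated on the version of the node at the other end of the stream. Condensed to the essential pattern, with the surrounding class elided and writeTypeField a hypothetical name:

    private void writeTypeField(StreamOutput out, @Nullable String type) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
            out.writeOptionalString(type); // 7.0+ peers accept a null type on the wire
        } else if (type == null) {
            // a typeless lookup cannot be encoded for a pre-7.0 peer
            throw new IllegalArgumentException("Typeless [terms] lookup queries are not supported if any " +
                "node is running a version before 7.0.");
        } else {
            out.writeString(type); // legacy non-optional encoding
        }
    }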
@@ -304,9 +304,7 @@ public class PeerRecoveryTargetService implements IndexEventListener {
 
                 @Override
                 public RecoveryResponse read(StreamInput in) throws IOException {
-                    RecoveryResponse recoveryResponse = new RecoveryResponse();
-                    recoveryResponse.readFrom(in);
-                    return recoveryResponse;
+                    return new RecoveryResponse(in);
                 }
             })
         );
@@ -24,53 +24,46 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.transport.TransportResponse;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 
-class RecoveryResponse extends TransportResponse {
+final class RecoveryResponse extends TransportResponse {
 
-    List<String> phase1FileNames = new ArrayList<>();
-    List<Long> phase1FileSizes = new ArrayList<>();
-    List<String> phase1ExistingFileNames = new ArrayList<>();
-    List<Long> phase1ExistingFileSizes = new ArrayList<>();
-    long phase1TotalSize;
-    long phase1ExistingTotalSize;
-    long phase1Time;
-    long phase1ThrottlingWaitTime;
+    final List<String> phase1FileNames;
+    final List<Long> phase1FileSizes;
+    final List<String> phase1ExistingFileNames;
+    final List<Long> phase1ExistingFileSizes;
+    final long phase1TotalSize;
+    final long phase1ExistingTotalSize;
+    final long phase1Time;
+    final long phase1ThrottlingWaitTime;
 
-    long startTime;
+    final long startTime;
 
-    int phase2Operations;
-    long phase2Time;
+    final int phase2Operations;
+    final long phase2Time;
 
-    RecoveryResponse() {
+    RecoveryResponse(List<String> phase1FileNames, List<Long> phase1FileSizes, List<String> phase1ExistingFileNames,
+                     List<Long> phase1ExistingFileSizes, long phase1TotalSize, long phase1ExistingTotalSize,
+                     long phase1Time, long phase1ThrottlingWaitTime, long startTime, int phase2Operations, long phase2Time) {
+        this.phase1FileNames = phase1FileNames;
+        this.phase1FileSizes = phase1FileSizes;
+        this.phase1ExistingFileNames = phase1ExistingFileNames;
+        this.phase1ExistingFileSizes = phase1ExistingFileSizes;
+        this.phase1TotalSize = phase1TotalSize;
+        this.phase1ExistingTotalSize = phase1ExistingTotalSize;
+        this.phase1Time = phase1Time;
+        this.phase1ThrottlingWaitTime = phase1ThrottlingWaitTime;
+        this.startTime = startTime;
+        this.phase2Operations = phase2Operations;
+        this.phase2Time = phase2Time;
     }
 
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        super.readFrom(in);
-        int size = in.readVInt();
-        phase1FileNames = new ArrayList<>(size);
-        for (int i = 0; i < size; i++) {
-            phase1FileNames.add(in.readString());
-        }
-        size = in.readVInt();
-        phase1FileSizes = new ArrayList<>(size);
-        for (int i = 0; i < size; i++) {
-            phase1FileSizes.add(in.readVLong());
-        }
-
-        size = in.readVInt();
-        phase1ExistingFileNames = new ArrayList<>(size);
-        for (int i = 0; i < size; i++) {
-            phase1ExistingFileNames.add(in.readString());
-        }
-        size = in.readVInt();
-        phase1ExistingFileSizes = new ArrayList<>(size);
-        for (int i = 0; i < size; i++) {
-            phase1ExistingFileSizes.add(in.readVLong());
-        }
-
+    RecoveryResponse(StreamInput in) throws IOException {
+        super(in);
+        phase1FileNames = in.readList(StreamInput::readString);
+        phase1FileSizes = in.readList(StreamInput::readVLong);
+        phase1ExistingFileNames = in.readList(StreamInput::readString);
+        phase1ExistingFileSizes = in.readList(StreamInput::readVLong);
         phase1TotalSize = in.readVLong();
         phase1ExistingTotalSize = in.readVLong();
         phase1Time = in.readVLong();
@@ -83,24 +76,10 @@ class RecoveryResponse extends TransportResponse {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeVInt(phase1FileNames.size());
-        for (String name : phase1FileNames) {
-            out.writeString(name);
-        }
-        out.writeVInt(phase1FileSizes.size());
-        for (long size : phase1FileSizes) {
-            out.writeVLong(size);
-        }
-
-        out.writeVInt(phase1ExistingFileNames.size());
-        for (String name : phase1ExistingFileNames) {
-            out.writeString(name);
-        }
-        out.writeVInt(phase1ExistingFileSizes.size());
-        for (long size : phase1ExistingFileSizes) {
-            out.writeVLong(size);
-        }
-
+        out.writeStringList(phase1FileNames);
+        out.writeCollection(phase1FileSizes, StreamOutput::writeVLong);
+        out.writeStringList(phase1ExistingFileNames);
+        out.writeCollection(phase1ExistingFileSizes, StreamOutput::writeVLong);
         out.writeVLong(phase1TotalSize);
         out.writeVLong(phase1ExistingTotalSize);
         out.writeVLong(phase1Time);
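In the RecoveryResponse rewrite above, the hand-rolled readVInt-plus-loop encoding is replaced by the StreamInput/StreamOutput collection helpers, which keep the two sides of the wire format trivially symmetric. The pairing, reduced to its essentials (all calls as used in this commit):

    // write side, and the matching read side that must mirror it in order:
    out.writeStringList(phase1FileNames);
    // read: phase1FileNames = in.readList(StreamInput::readString);
    out.writeCollection(phase1FileSizes, StreamOutput::writeVLong);
    // read: phase1FileSizes = in.readList(StreamInput::readVLong);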
@@ -41,6 +41,7 @@ import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
 import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.CancellableThreads;
 import org.elasticsearch.common.util.concurrent.FutureUtils;
 import org.elasticsearch.core.internal.io.IOUtils;
@@ -64,6 +65,7 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Locale;
@@ -95,8 +97,6 @@ public class RecoverySourceHandler {
     private final int chunkSizeInBytes;
     private final RecoveryTargetHandler recoveryTarget;
 
-    protected final RecoveryResponse response;
-
     private final CancellableThreads cancellableThreads = new CancellableThreads() {
         @Override
         protected void onCancel(String reason, @Nullable Exception suppressedException) {
@@ -122,7 +122,6 @@ public class RecoverySourceHandler {
         this.shardId = this.request.shardId().id();
         this.logger = Loggers.getLogger(getClass(), request.shardId(), "recover to " + request.targetNode().getName());
         this.chunkSizeInBytes = fileChunkSizeInBytes;
-        this.response = new RecoveryResponse();
     }
 
     public StartRecoveryRequest getRequest() {
@@ -149,10 +148,12 @@ public class RecoverySourceHandler {
         final long requiredSeqNoRangeStart;
         final boolean isSequenceNumberBasedRecovery = request.startingSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO &&
             isTargetSameHistory() && shard.hasCompleteHistoryOperations("peer-recovery", request.startingSeqNo());
+        final SendFileResult sendFileResult;
         if (isSequenceNumberBasedRecovery) {
             logger.trace("performing sequence numbers based recovery. starting at [{}]", request.startingSeqNo());
             startingSeqNo = request.startingSeqNo();
             requiredSeqNoRangeStart = startingSeqNo;
+            sendFileResult = SendFileResult.EMPTY;
         } else {
             final Engine.IndexCommitRef phase1Snapshot;
             try {
@@ -169,7 +170,7 @@ public class RecoverySourceHandler {
             startingSeqNo = shard.indexSettings().isSoftDeleteEnabled() ? requiredSeqNoRangeStart : 0;
             try {
                 final int estimateNumOps = shard.estimateNumberOfHistoryOperations("peer-recovery", startingSeqNo);
-                phase1(phase1Snapshot.getIndexCommit(), () -> estimateNumOps);
+                sendFileResult = phase1(phase1Snapshot.getIndexCommit(), () -> estimateNumOps);
             } catch (final Exception e) {
                 throw new RecoveryEngineException(shard.shardId(), 1, "phase1 failed", e);
             } finally {
@@ -184,9 +185,10 @@ public class RecoverySourceHandler {
         assert requiredSeqNoRangeStart >= startingSeqNo : "requiredSeqNoRangeStart [" + requiredSeqNoRangeStart + "] is lower than ["
             + startingSeqNo + "]";
 
+        final TimeValue prepareEngineTime;
         try {
             // For a sequence based recovery, the target can keep its local translog
-            prepareTargetForTranslog(isSequenceNumberBasedRecovery == false,
+            prepareEngineTime = prepareTargetForTranslog(isSequenceNumberBasedRecovery == false,
                 shard.estimateNumberOfHistoryOperations("peer-recovery", startingSeqNo));
         } catch (final Exception e) {
             throw new RecoveryEngineException(shard.shardId(), 1, "prepare target for translog failed", e);
@@ -213,21 +215,25 @@ public class RecoverySourceHandler {
             logger.trace("snapshot translog for recovery; current size is [{}]",
                 shard.estimateNumberOfHistoryOperations("peer-recovery", startingSeqNo));
         }
-        final long targetLocalCheckpoint;
+        final SendSnapshotResult sendSnapshotResult;
         try (Translog.Snapshot snapshot = shard.getHistoryOperations("peer-recovery", startingSeqNo)) {
             // we have to capture the max_seen_auto_id_timestamp and the max_seq_no_of_updates to make sure that these values
             // are at least as high as the corresponding values on the primary when any of these operations were executed on it.
             final long maxSeenAutoIdTimestamp = shard.getMaxSeenAutoIdTimestamp();
             final long maxSeqNoOfUpdatesOrDeletes = shard.getMaxSeqNoOfUpdatesOrDeletes();
-            targetLocalCheckpoint = phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot,
+            sendSnapshotResult = phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot,
                 maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes);
         } catch (Exception e) {
             throw new RecoveryEngineException(shard.shardId(), 2, "phase2 failed", e);
         }
 
-        finalizeRecovery(targetLocalCheckpoint);
+        finalizeRecovery(sendSnapshotResult.targetLocalCheckpoint);
+        final long phase1ThrottlingWaitTime = 0L; // TODO: return the actual throttle time
+        return new RecoveryResponse(sendFileResult.phase1FileNames, sendFileResult.phase1FileSizes,
+            sendFileResult.phase1ExistingFileNames, sendFileResult.phase1ExistingFileSizes, sendFileResult.totalSize,
+            sendFileResult.existingTotalSize, sendFileResult.took.millis(), phase1ThrottlingWaitTime, prepareEngineTime.millis(),
+            sendSnapshotResult.totalOperations, sendSnapshotResult.tookTime.millis());
         }
-        return response;
     }
 
     private boolean isTargetSameHistory() {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static final class SendFileResult {
|
||||||
|
final List<String> phase1FileNames;
|
||||||
|
final List<Long> phase1FileSizes;
|
||||||
|
final long totalSize;
|
||||||
|
|
||||||
|
final List<String> phase1ExistingFileNames;
|
||||||
|
final List<Long> phase1ExistingFileSizes;
|
||||||
|
final long existingTotalSize;
|
||||||
|
|
||||||
|
final TimeValue took;
|
||||||
|
|
||||||
|
SendFileResult(List<String> phase1FileNames, List<Long> phase1FileSizes, long totalSize,
|
||||||
|
List<String> phase1ExistingFileNames, List<Long> phase1ExistingFileSizes, long existingTotalSize, TimeValue took) {
|
||||||
|
this.phase1FileNames = phase1FileNames;
|
||||||
|
this.phase1FileSizes = phase1FileSizes;
|
||||||
|
this.totalSize = totalSize;
|
||||||
|
this.phase1ExistingFileNames = phase1ExistingFileNames;
|
||||||
|
this.phase1ExistingFileSizes = phase1ExistingFileSizes;
|
||||||
|
this.existingTotalSize = existingTotalSize;
|
||||||
|
this.took = took;
|
||||||
|
}
|
||||||
|
|
||||||
|
static final SendFileResult EMPTY = new SendFileResult(Collections.emptyList(), Collections.emptyList(), 0L,
|
||||||
|
Collections.emptyList(), Collections.emptyList(), 0L, TimeValue.ZERO);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Perform phase1 of the recovery operations. Once this {@link IndexCommit}
|
* Perform phase1 of the recovery operations. Once this {@link IndexCommit}
|
||||||
* snapshot has been performed no commit operations (files being fsync'd)
|
* snapshot has been performed no commit operations (files being fsync'd)
|
||||||
@@ -285,12 +317,16 @@ public class RecoverySourceHandler {
      * segments that are missing. Only segments that have the same size and
      * checksum can be reused
      */
-    public void phase1(final IndexCommit snapshot, final Supplier<Integer> translogOps) {
+    public SendFileResult phase1(final IndexCommit snapshot, final Supplier<Integer> translogOps) {
         cancellableThreads.checkForCancel();
         // Total size of segment files that are recovered
         long totalSize = 0;
         // Total size of segment files that were able to be re-used
         long existingTotalSize = 0;
+        final List<String> phase1FileNames = new ArrayList<>();
+        final List<Long> phase1FileSizes = new ArrayList<>();
+        final List<String> phase1ExistingFileNames = new ArrayList<>();
+        final List<Long> phase1ExistingFileSizes = new ArrayList<>();
         final Store store = shard.store();
         store.incRef();
         try {
@@ -331,8 +367,8 @@ public class RecoverySourceHandler {
             } else {
                 final Store.RecoveryDiff diff = recoverySourceMetadata.recoveryDiff(request.metadataSnapshot());
                 for (StoreFileMetaData md : diff.identical) {
-                    response.phase1ExistingFileNames.add(md.name());
-                    response.phase1ExistingFileSizes.add(md.length());
+                    phase1ExistingFileNames.add(md.name());
+                    phase1ExistingFileSizes.add(md.length());
                     existingTotalSize += md.length();
                     if (logger.isTraceEnabled()) {
                         logger.trace("recovery [phase1]: not recovering [{}], exist in local store and has checksum [{}]," +
@@ -350,20 +386,16 @@ public class RecoverySourceHandler {
                 } else {
                     logger.trace("recovery [phase1]: recovering [{}], does not exist in remote", md.name());
                 }
-                response.phase1FileNames.add(md.name());
-                response.phase1FileSizes.add(md.length());
+                phase1FileNames.add(md.name());
+                phase1FileSizes.add(md.length());
                 totalSize += md.length();
             }
 
-            response.phase1TotalSize = totalSize;
-            response.phase1ExistingTotalSize = existingTotalSize;
-
             logger.trace("recovery [phase1]: recovering_files [{}] with total_size [{}], reusing_files [{}] with total_size [{}]",
-                response.phase1FileNames.size(),
-                new ByteSizeValue(totalSize), response.phase1ExistingFileNames.size(), new ByteSizeValue(existingTotalSize));
-            cancellableThreads.execute(() ->
-                recoveryTarget.receiveFileInfo(response.phase1FileNames, response.phase1FileSizes, response.phase1ExistingFileNames,
-                    response.phase1ExistingFileSizes, translogOps.get()));
+                phase1FileNames.size(), new ByteSizeValue(totalSize),
+                phase1ExistingFileNames.size(), new ByteSizeValue(existingTotalSize));
+            cancellableThreads.execute(() -> recoveryTarget.receiveFileInfo(
+                phase1FileNames, phase1FileSizes, phase1ExistingFileNames, phase1ExistingFileSizes, translogOps.get()));
             // How many bytes we've copied since we last called RateLimiter.pause
             final Function<StoreFileMetaData, OutputStream> outputStreamFactories =
                 md -> new BufferedOutputStream(new RecoveryOutputStream(md, translogOps), chunkSizeInBytes);
@@ -417,27 +449,27 @@ public class RecoverySourceHandler {
                     }
                 }
             }
-            logger.trace("recovery [phase1]: took [{}]", stopWatch.totalTime());
-            response.phase1Time = stopWatch.totalTime().millis();
+            final TimeValue took = stopWatch.totalTime();
+            logger.trace("recovery [phase1]: took [{}]", took);
+            return new SendFileResult(phase1FileNames, phase1FileSizes, totalSize, phase1ExistingFileNames,
+                phase1ExistingFileSizes, existingTotalSize, took);
         } catch (Exception e) {
-            throw new RecoverFilesRecoveryException(request.shardId(), response.phase1FileNames.size(), new ByteSizeValue(totalSize), e);
+            throw new RecoverFilesRecoveryException(request.shardId(), phase1FileNames.size(), new ByteSizeValue(totalSize), e);
         } finally {
             store.decRef();
         }
     }
 
-    void prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException {
+    TimeValue prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException {
         StopWatch stopWatch = new StopWatch().start();
         logger.trace("recovery [phase1]: prepare remote engine for translog");
-        final long startEngineStart = stopWatch.totalTime().millis();
         // Send a request preparing the new shard's translog to receive operations. This ensures the shard engine is started and disables
         // garbage collection (not the JVM's GC!) of tombstone deletes.
         cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(fileBasedRecovery, totalTranslogOps));
         stopWatch.stop();
-        response.startTime = stopWatch.totalTime().millis() - startEngineStart;
-        logger.trace("recovery [phase1]: remote engine start took [{}]", stopWatch.totalTime());
+        final TimeValue tookTime = stopWatch.totalTime();
+        logger.trace("recovery [phase1]: remote engine start took [{}]", tookTime);
+        return tookTime;
     }
 
     /**
@@ -454,102 +486,23 @@ public class RecoverySourceHandler {
      * @param snapshot a snapshot of the translog
      * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary
      * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates or deletes on the primary after these operations were executed on it.
-     * @return the local checkpoint on the target
+     * @return the send snapshot result
      */
-    long phase2(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, final Translog.Snapshot snapshot,
-                final long maxSeenAutoIdTimestamp, final long maxSeqNoOfUpdatesOrDeletes)
-        throws IOException {
+    SendSnapshotResult phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot,
+                              long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) throws IOException {
+        assert requiredSeqNoRangeStart <= endingSeqNo + 1:
+            "requiredSeqNoRangeStart " + requiredSeqNoRangeStart + " is larger than endingSeqNo " + endingSeqNo;
+        assert startingSeqNo <= requiredSeqNoRangeStart :
+            "startingSeqNo " + startingSeqNo + " is larger than requiredSeqNoRangeStart " + requiredSeqNoRangeStart;
         if (shard.state() == IndexShardState.CLOSED) {
             throw new IndexShardClosedException(request.shardId());
         }
-        cancellableThreads.checkForCancel();
 
         final StopWatch stopWatch = new StopWatch().start();
 
         logger.trace("recovery [phase2]: sending transaction log operations (seq# from [" + startingSeqNo + "], " +
             "required [" + requiredSeqNoRangeStart + ":" + endingSeqNo + "]");
 
-        // send all the snapshot's translog operations to the target
-        final SendSnapshotResult result = sendSnapshot(
-            startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes);
-
-        stopWatch.stop();
-        logger.trace("recovery [phase2]: took [{}]", stopWatch.totalTime());
-        response.phase2Time = stopWatch.totalTime().millis();
-        response.phase2Operations = result.totalOperations;
-        return result.targetLocalCheckpoint;
-    }
-
-    /*
-     * finalizes the recovery process
-     */
-    public void finalizeRecovery(final long targetLocalCheckpoint) throws IOException {
-        if (shard.state() == IndexShardState.CLOSED) {
-            throw new IndexShardClosedException(request.shardId());
-        }
-        cancellableThreads.checkForCancel();
-        StopWatch stopWatch = new StopWatch().start();
-        logger.trace("finalizing recovery");
-        /*
-         * Before marking the shard as in-sync we acquire an operation permit. We do this so that there is a barrier between marking a
-         * shard as in-sync and relocating a shard. If we acquire the permit then no relocation handoff can complete before we are done
-         * marking the shard as in-sync. If the relocation handoff holds all the permits then after the handoff completes and we acquire
-         * the permit then the state of the shard will be relocated and this recovery will fail.
-         */
-        runUnderPrimaryPermit(() -> shard.markAllocationIdAsInSync(request.targetAllocationId(), targetLocalCheckpoint),
-            shardId + " marking " + request.targetAllocationId() + " as in sync", shard, cancellableThreads, logger);
-        final long globalCheckpoint = shard.getGlobalCheckpoint();
-        cancellableThreads.executeIO(() -> recoveryTarget.finalizeRecovery(globalCheckpoint));
-        runUnderPrimaryPermit(() -> shard.updateGlobalCheckpointForShard(request.targetAllocationId(), globalCheckpoint),
-            shardId + " updating " + request.targetAllocationId() + "'s global checkpoint", shard, cancellableThreads, logger);
-
-        if (request.isPrimaryRelocation()) {
-            logger.trace("performing relocation hand-off");
-            // this acquires all IndexShard operation permits and will thus delay new recoveries until it is done
-            cancellableThreads.execute(() -> shard.relocated(recoveryTarget::handoffPrimaryContext));
-            /*
-             * if the recovery process fails after disabling primary mode on the source shard, both relocation source and
-             * target are failed (see {@link IndexShard#updateRoutingEntry}).
-             */
-        }
-        stopWatch.stop();
-        logger.trace("finalizing recovery took [{}]", stopWatch.totalTime());
-    }
-
-    static class SendSnapshotResult {
-
-        final long targetLocalCheckpoint;
-        final int totalOperations;
-
-        SendSnapshotResult(final long targetLocalCheckpoint, final int totalOperations) {
-            this.targetLocalCheckpoint = targetLocalCheckpoint;
-            this.totalOperations = totalOperations;
-        }
-
-    }
-
-    /**
-     * Send the given snapshot's operations with a sequence number greater than the specified staring sequence number to this handler's
-     * target node.
-     * <p>
-     * Operations are bulked into a single request depending on an operation count limit or size-in-bytes limit.
-     *
-     * @param startingSeqNo the sequence number for which only operations with a sequence number greater than this will be sent
-     * @param requiredSeqNoRangeStart the lower sequence number of the required range
-     * @param endingSeqNo the upper bound of the sequence number range to be sent (inclusive)
-     * @param snapshot the translog snapshot to replay operations from @return the local checkpoint on the target and the
-     *                 total number of operations sent
-     * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary
-     * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates or deletes on the primary after these operations were executed on it.
-     * @throws IOException if an I/O exception occurred reading the translog snapshot
-     */
-    protected SendSnapshotResult sendSnapshot(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo,
-                                              final Translog.Snapshot snapshot, final long maxSeenAutoIdTimestamp,
-                                              final long maxSeqNoOfUpdatesOrDeletes) throws IOException {
-        assert requiredSeqNoRangeStart <= endingSeqNo + 1:
-            "requiredSeqNoRangeStart " + requiredSeqNoRangeStart + " is larger than endingSeqNo " + endingSeqNo;
-        assert startingSeqNo <= requiredSeqNoRangeStart :
-            "startingSeqNo " + startingSeqNo + " is larger than requiredSeqNoRangeStart " + requiredSeqNoRangeStart;
         int ops = 0;
         long size = 0;
         int skippedOps = 0;
@@ -615,7 +568,58 @@ public class RecoverySourceHandler {
 
         logger.trace("sent final batch of [{}][{}] (total: [{}]) translog operations", ops, new ByteSizeValue(size), expectedTotalOps);
 
-        return new SendSnapshotResult(targetLocalCheckpoint.get(), totalSentOps);
+        stopWatch.stop();
+        final TimeValue tookTime = stopWatch.totalTime();
+        logger.trace("recovery [phase2]: took [{}]", tookTime);
+        return new SendSnapshotResult(targetLocalCheckpoint.get(), totalSentOps, tookTime);
+    }
+
+    /*
+     * finalizes the recovery process
+     */
+    public void finalizeRecovery(final long targetLocalCheckpoint) throws IOException {
+        if (shard.state() == IndexShardState.CLOSED) {
+            throw new IndexShardClosedException(request.shardId());
+        }
+        cancellableThreads.checkForCancel();
+        StopWatch stopWatch = new StopWatch().start();
+        logger.trace("finalizing recovery");
+        /*
+         * Before marking the shard as in-sync we acquire an operation permit. We do this so that there is a barrier between marking a
+         * shard as in-sync and relocating a shard. If we acquire the permit then no relocation handoff can complete before we are done
+         * marking the shard as in-sync. If the relocation handoff holds all the permits then after the handoff completes and we acquire
+         * the permit then the state of the shard will be relocated and this recovery will fail.
+         */
+        runUnderPrimaryPermit(() -> shard.markAllocationIdAsInSync(request.targetAllocationId(), targetLocalCheckpoint),
+            shardId + " marking " + request.targetAllocationId() + " as in sync", shard, cancellableThreads, logger);
+        final long globalCheckpoint = shard.getGlobalCheckpoint();
+        cancellableThreads.executeIO(() -> recoveryTarget.finalizeRecovery(globalCheckpoint));
+        runUnderPrimaryPermit(() -> shard.updateGlobalCheckpointForShard(request.targetAllocationId(), globalCheckpoint),
+            shardId + " updating " + request.targetAllocationId() + "'s global checkpoint", shard, cancellableThreads, logger);
+
+        if (request.isPrimaryRelocation()) {
+            logger.trace("performing relocation hand-off");
+            // this acquires all IndexShard operation permits and will thus delay new recoveries until it is done
+            cancellableThreads.execute(() -> shard.relocated(recoveryTarget::handoffPrimaryContext));
+            /*
+             * if the recovery process fails after disabling primary mode on the source shard, both relocation source and
+             * target are failed (see {@link IndexShard#updateRoutingEntry}).
+             */
+        }
+        stopWatch.stop();
+        logger.trace("finalizing recovery took [{}]", stopWatch.totalTime());
+    }
+
+    static final class SendSnapshotResult {
+        final long targetLocalCheckpoint;
+        final int totalOperations;
+        final TimeValue tookTime;
+
+        SendSnapshotResult(final long targetLocalCheckpoint, final int totalOperations, final TimeValue tookTime) {
+            this.targetLocalCheckpoint = targetLocalCheckpoint;
+            this.totalOperations = totalOperations;
+            this.tookTime = tookTime;
+        }
     }
 
     /**
@@ -111,62 +111,63 @@ public class RepositoriesService implements ClusterStateApplier {
             return;
         }
 
-        clusterService.submitStateUpdateTask(request.cause, new AckedClusterStateUpdateTask<ClusterStateUpdateResponse>(request, registrationListener) {
-            @Override
-            protected ClusterStateUpdateResponse newResponse(boolean acknowledged) {
-                return new ClusterStateUpdateResponse(acknowledged);
-            }
-
-            @Override
-            public ClusterState execute(ClusterState currentState) {
-                ensureRepositoryNotInUse(currentState, request.name);
-                MetaData metaData = currentState.metaData();
-                MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
-                RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE);
-                if (repositories == null) {
-                    logger.info("put repository [{}]", request.name);
-                    repositories = new RepositoriesMetaData(
-                        Collections.singletonList(new RepositoryMetaData(request.name, request.type, request.settings)));
-                } else {
-                    boolean found = false;
-                    List<RepositoryMetaData> repositoriesMetaData = new ArrayList<>(repositories.repositories().size() + 1);
-
-                    for (RepositoryMetaData repositoryMetaData : repositories.repositories()) {
-                        if (repositoryMetaData.name().equals(newRepositoryMetaData.name())) {
-                            if (newRepositoryMetaData.equals(repositoryMetaData)) {
-                                // Previous version is the same as this one no update is needed.
-                                return currentState;
-                            }
-                            found = true;
-                            repositoriesMetaData.add(newRepositoryMetaData);
-                        } else {
-                            repositoriesMetaData.add(repositoryMetaData);
-                        }
-                    }
-                    if (!found) {
-                        logger.info("put repository [{}]", request.name);
-                        repositoriesMetaData.add(new RepositoryMetaData(request.name, request.type, request.settings));
-                    } else {
-                        logger.info("update repository [{}]", request.name);
-                    }
-                    repositories = new RepositoriesMetaData(repositoriesMetaData);
-                }
-                mdBuilder.putCustom(RepositoriesMetaData.TYPE, repositories);
-                return ClusterState.builder(currentState).metaData(mdBuilder).build();
-            }
-
-            @Override
-            public void onFailure(String source, Exception e) {
-                logger.warn(() -> new ParameterizedMessage("failed to create repository [{}]", request.name), e);
-                super.onFailure(source, e);
-            }
-
-            @Override
-            public boolean mustAck(DiscoveryNode discoveryNode) {
-                // repository is created on both master and data nodes
-                return discoveryNode.isMasterNode() || discoveryNode.isDataNode();
-            }
-        });
+        clusterService.submitStateUpdateTask(request.cause,
+            new AckedClusterStateUpdateTask<ClusterStateUpdateResponse>(request, registrationListener) {
+                @Override
+                protected ClusterStateUpdateResponse newResponse(boolean acknowledged) {
+                    return new ClusterStateUpdateResponse(acknowledged);
+                }
+
+                @Override
+                public ClusterState execute(ClusterState currentState) {
+                    ensureRepositoryNotInUse(currentState, request.name);
+                    MetaData metaData = currentState.metaData();
+                    MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
+                    RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE);
+                    if (repositories == null) {
+                        logger.info("put repository [{}]", request.name);
+                        repositories = new RepositoriesMetaData(
+                            Collections.singletonList(new RepositoryMetaData(request.name, request.type, request.settings)));
+                    } else {
+                        boolean found = false;
+                        List<RepositoryMetaData> repositoriesMetaData = new ArrayList<>(repositories.repositories().size() + 1);
+
+                        for (RepositoryMetaData repositoryMetaData : repositories.repositories()) {
+                            if (repositoryMetaData.name().equals(newRepositoryMetaData.name())) {
+                                if (newRepositoryMetaData.equals(repositoryMetaData)) {
+                                    // Previous version is the same as this one no update is needed.
+                                    return currentState;
+                                }
+                                found = true;
+                                repositoriesMetaData.add(newRepositoryMetaData);
+                            } else {
+                                repositoriesMetaData.add(repositoryMetaData);
+                            }
+                        }
+                        if (!found) {
+                            logger.info("put repository [{}]", request.name);
+                            repositoriesMetaData.add(new RepositoryMetaData(request.name, request.type, request.settings));
+                        } else {
+                            logger.info("update repository [{}]", request.name);
+                        }
+                        repositories = new RepositoriesMetaData(repositoriesMetaData);
+                    }
+                    mdBuilder.putCustom(RepositoriesMetaData.TYPE, repositories);
+                    return ClusterState.builder(currentState).metaData(mdBuilder).build();
+                }
+
+                @Override
+                public void onFailure(String source, Exception e) {
+                    logger.warn(() -> new ParameterizedMessage("failed to create repository [{}]", request.name), e);
+                    super.onFailure(source, e);
+                }
+
+                @Override
+                public boolean mustAck(DiscoveryNode discoveryNode) {
+                    // repository is created on both master and data nodes
+                    return discoveryNode.isMasterNode() || discoveryNode.isDataNode();
+                }
+            });
     }
     /**
      * Unregisters repository in the cluster
@@ -323,7 +324,8 @@ public class RepositoriesService implements ClusterStateApplier {
                 } catch (RepositoryException ex) {
                     // TODO: this catch is bogus, it means the old repo is already closed,
                     // but we have nothing to replace it
-                    logger.warn(() -> new ParameterizedMessage("failed to change repository [{}]", repositoryMetaData.name()), ex);
+                    logger.warn(() -> new ParameterizedMessage("failed to change repository [{}]",
+                        repositoryMetaData.name()), ex);
                 }
             }
         } else {
@@ -411,7 +413,8 @@ public class RepositoriesService implements ClusterStateApplier {
             repository.start();
             return repository;
         } catch (Exception e) {
-            logger.warn(new ParameterizedMessage("failed to create repository [{}][{}]", repositoryMetaData.type(), repositoryMetaData.name()), e);
+            logger.warn(new ParameterizedMessage("failed to create repository [{}][{}]",
+                repositoryMetaData.type(), repositoryMetaData.name()), e);
             throw new RepositoryException(repositoryMetaData.name(), "failed to create repository", e);
         }
     }
@ -216,7 +216,8 @@ public interface Repository extends LifecycleComponent {
      * @param snapshotShardId shard id (in the snapshot)
      * @param recoveryState   recovery state
      */
-    void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState);
+    void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId,
+                      ShardId snapshotShardId, RecoveryState recoveryState);
 
     /**
      * Retrieve shard snapshot status for the stored snapshot
@ -59,11 +59,13 @@ public class VerifyNodeRepositoryAction {
 
     private final RepositoriesService repositoriesService;
 
-    public VerifyNodeRepositoryAction(TransportService transportService, ClusterService clusterService, RepositoriesService repositoriesService) {
+    public VerifyNodeRepositoryAction(TransportService transportService, ClusterService clusterService,
+                                      RepositoriesService repositoriesService) {
         this.transportService = transportService;
         this.clusterService = clusterService;
         this.repositoriesService = repositoriesService;
-        transportService.registerRequestHandler(ACTION_NAME, VerifyNodeRepositoryRequest::new, ThreadPool.Names.SNAPSHOT, new VerifyNodeRepositoryRequestHandler());
+        transportService.registerRequestHandler(ACTION_NAME, VerifyNodeRepositoryRequest::new, ThreadPool.Names.SNAPSHOT,
+            new VerifyNodeRepositoryRequestHandler());
     }
 
     public void verify(String repository, String verificationToken, final ActionListener<VerifyResponse> listener) {
@ -90,28 +92,31 @@ public class VerifyNodeRepositoryAction {
                     finishVerification(listener, nodes, errors);
                 }
             } else {
-                transportService.sendRequest(node, ACTION_NAME, new VerifyNodeRepositoryRequest(repository, verificationToken), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
+                transportService.sendRequest(node, ACTION_NAME, new VerifyNodeRepositoryRequest(repository, verificationToken),
+                    new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
                     @Override
                     public void handleResponse(TransportResponse.Empty response) {
                         if (counter.decrementAndGet() == 0) {
                             finishVerification(listener, nodes, errors);
                         }
                     }
 
                     @Override
                     public void handleException(TransportException exp) {
                         errors.add(new VerificationFailure(node.getId(), exp));
                         if (counter.decrementAndGet() == 0) {
                             finishVerification(listener, nodes, errors);
                         }
                     }
                 });
             }
         }
     }
 
-    public void finishVerification(ActionListener<VerifyResponse> listener, List<DiscoveryNode> nodes, CopyOnWriteArrayList<VerificationFailure> errors) {
-        listener.onResponse(new RepositoriesService.VerifyResponse(nodes.toArray(new DiscoveryNode[nodes.size()]), errors.toArray(new VerificationFailure[errors.size()])));
+    public void finishVerification(ActionListener<VerifyResponse> listener, List<DiscoveryNode> nodes,
+                                   CopyOnWriteArrayList<VerificationFailure> errors) {
+        listener.onResponse(new RepositoriesService.VerifyResponse(nodes.toArray(new DiscoveryNode[nodes.size()]),
+            errors.toArray(new VerificationFailure[errors.size()])));
     }
 
     private void doVerify(String repositoryName, String verificationToken, DiscoveryNode localNode) {
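
The verify() fan-out above follows a common countdown pattern: one shared counter starts at the node count, every response or failure decrements it, and whichever handler reaches zero finishes the verification exactly once. A rough synchronous sketch of that pattern, with illustrative names rather than the real transport API:

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;

final class FanOutCountdown {
    // Fires `onDone` exactly once, after every node has either responded or failed.
    static void verifyAll(List<String> nodeIds, Consumer<List<String>> onDone) {
        AtomicInteger counter = new AtomicInteger(nodeIds.size());
        CopyOnWriteArrayList<String> errors = new CopyOnWriteArrayList<>();
        for (String node : nodeIds) {
            // Stand-in for the async transport call; a real handler would
            // run on a response thread, hence the thread-safe collections.
            boolean ok = simulateRemoteVerify(node);
            if (!ok) {
                errors.add(node);
            }
            if (counter.decrementAndGet() == 0) {
                onDone.accept(errors);  // the last response in finishes the verification
            }
        }
    }

    private static boolean simulateRemoteVerify(String node) {
        return !node.isEmpty();  // placeholder "verification"
    }

    public static void main(String[] args) {
        verifyAll(List.of("node-1", "node-2"), errs -> System.out.println("failures: " + errs));
    }
}
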
@ -479,12 +479,12 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                 // we'll ignore that and accept that cleanup didn't fully succeed.
                 // since we are using UUIDs for path names, this won't be an issue for
                 // snapshotting indices of the same name
-                logger.debug(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " +
-                    "its index folder due to the directory not being empty.", metadata.name(), indexId), dnee);
+                logger.debug(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, " +
+                    "but failed to clean up its index folder due to the directory not being empty.", metadata.name(), indexId), dnee);
             } catch (IOException ioe) {
                 // a different IOException occurred while trying to delete - will just log the issue for now
-                logger.debug(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " +
-                    "its index folder.", metadata.name(), indexId), ioe);
+                logger.debug(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, " +
+                    "but failed to clean up its index folder.", metadata.name(), indexId), ioe);
             }
         }
     } catch (IOException | ResourceNotFoundException ex) {
@ -524,7 +524,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         try {
             indexMetaDataFormat.delete(indexMetaDataBlobContainer, snapshotId.getUUID());
         } catch (IOException ex) {
-            logger.warn(() -> new ParameterizedMessage("[{}] failed to delete metadata for index [{}]", snapshotId, indexId.getName()), ex);
+            logger.warn(() -> new ParameterizedMessage("[{}] failed to delete metadata for index [{}]",
+                snapshotId, indexId.getName()), ex);
         }
     }
 
@ -861,7 +862,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
     }
 
     @Override
-    public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState) {
+    public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId,
+                             RecoveryState recoveryState) {
         final RestoreContext snapshotContext = new RestoreContext(shard, snapshotId, indexId, snapshotShardId, recoveryState);
         try {
             snapshotContext.restore();
@ -898,12 +900,14 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                     testBlobContainer.writeBlob("data-" + localNode.getId() + ".dat", stream, bytes.length(), true);
                 }
             } catch (IOException exp) {
-                throw new RepositoryVerificationException(metadata.name(), "store location [" + blobStore() + "] is not accessible on the node [" + localNode + "]", exp);
+                throw new RepositoryVerificationException(metadata.name(), "store location [" + blobStore() +
+                    "] is not accessible on the node [" + localNode + "]", exp);
             }
         } else {
-            throw new RepositoryVerificationException(metadata.name(), "a file written by master to the store [" + blobStore() + "] cannot be accessed on the node [" + localNode + "]. "
-                + "This might indicate that the store [" + blobStore() + "] is not shared between this node and the master node or "
-                + "that permissions on the store don't allow reading files written by the master node");
+            throw new RepositoryVerificationException(metadata.name(), "a file written by master to the store [" + blobStore() +
+                "] cannot be accessed on the node [" + localNode + "]. " +
+                "This might indicate that the store [" + blobStore() + "] is not shared between this node and the master node or " +
+                "that permissions on the store don't allow reading files written by the master node");
         }
     }
 }
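
The two exception messages above describe the verification contract for shared repositories: the master writes a marker blob, and every node must be able to read it back, otherwise the store is either not actually shared or not readable by that node. A loose file-system analogue of that check, assuming a path-based store rather than the BlobContainer API:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

final class SharedStoreCheck {
    // Master side: drop a marker under the repository root.
    static void writeMarker(Path repoRoot, String nodeId) throws IOException {
        Files.writeString(repoRoot.resolve("tests-" + nodeId + ".dat"), nodeId);
    }

    // Data-node side: the same path must be readable, otherwise the "shared"
    // store is not shared between the nodes (or permissions are wrong).
    static boolean canReadMarker(Path repoRoot, String nodeId) {
        return Files.isReadable(repoRoot.resolve("tests-" + nodeId + ".dat"));
    }
}
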
@ -945,7 +949,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         Context(SnapshotId snapshotId, IndexId indexId, ShardId shardId, ShardId snapshotShardId) {
             this.snapshotId = snapshotId;
             this.shardId = shardId;
-            blobContainer = blobStore().blobContainer(basePath().add("indices").add(indexId.getId()).add(Integer.toString(snapshotShardId.getId())));
+            blobContainer = blobStore().blobContainer(basePath().add("indices").add(indexId.getId())
+                .add(Integer.toString(snapshotShardId.getId())));
         }
 
         /**
@ -1235,7 +1240,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                     // in a bwc compatible way.
                     maybeRecalculateMetadataHash(blobContainer, fileInfo, metadata);
                 } catch (Exception e) {
-                    logger.warn(() -> new ParameterizedMessage("{} Can't calculate hash from blob for file [{}] [{}]", shardId, fileInfo.physicalName(), fileInfo.metadata()), e);
+                    logger.warn(() -> new ParameterizedMessage("{} Can't calculate hash from blob for file [{}] [{}]",
+                        shardId, fileInfo.physicalName(), fileInfo.metadata()), e);
                 }
                 if (fileInfo.isSame(md) && snapshotFileExistsInBlobs(fileInfo, blobs)) {
                     // a commit point file with the same name, size and checksum was already copied to repository
@ -1253,7 +1259,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                     indexIncrementalFileCount++;
                     indexIncrementalSize += md.length();
                     // create a new FileInfo
-                    BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo = new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), md, chunkSize());
+                    BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo =
+                        new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), md, chunkSize());
                     indexCommitPointFiles.add(snapshotFileInfo);
                     filesToSnapshot.add(snapshotFileInfo);
                 } else {
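
The surrounding hunk implements incremental snapshotting: a file already present in the repository is reused only when its name, length and checksum all match the local copy, and everything else is re-uploaded and counted toward the incremental totals. A small illustrative sketch of that decision, using a stand-in FileMeta record rather than the real StoreFileMetaData type:

import java.util.Map;
import java.util.Objects;

final class IncrementalSnapshot {
    // Stand-in for per-file snapshot metadata, for illustration only.
    record FileMeta(String name, long length, String checksum) {}

    // A file already in the repository is reused only if name, length and
    // checksum all match; anything else must be re-uploaded.
    static boolean canReuse(FileMeta local, Map<String, FileMeta> inRepository) {
        FileMeta remote = inRepository.get(local.name());
        return remote != null
            && remote.length() == local.length()
            && Objects.equals(remote.checksum(), local.checksum());
    }
}
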
@ -1411,7 +1418,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
      * The new logic for StoreFileMetaData reads the entire {@code .si} and {@code segments.n} files to strengthen the
      * comparison of the files on a per-segment / per-commit level.
      */
-    private static void maybeRecalculateMetadataHash(final BlobContainer blobContainer, final BlobStoreIndexShardSnapshot.FileInfo fileInfo, Store.MetadataSnapshot snapshot) throws Exception {
+    private static void maybeRecalculateMetadataHash(final BlobContainer blobContainer, final BlobStoreIndexShardSnapshot.FileInfo fileInfo,
+                                                     Store.MetadataSnapshot snapshot) throws Exception {
         final StoreFileMetaData metadata;
         if (fileInfo != null && (metadata = snapshot.get(fileInfo.physicalName())) != null) {
             if (metadata.hash().length > 0 && fileInfo.metadata().hash().length == 0) {
|
                 logger.trace("[{}] [{}] restoring from to an empty shard", shardId, snapshotId);
                 recoveryTargetMetadata = Store.MetadataSnapshot.EMPTY;
             } catch (IOException e) {
-                logger.warn(() -> new ParameterizedMessage("{} Can't read metadata from store, will not reuse any local file while restoring", shardId), e);
+                logger.warn(() -> new ParameterizedMessage("{} Can't read metadata from store, will not reuse any " +
+                    "local file while restoring", shardId), e);
                 recoveryTargetMetadata = Store.MetadataSnapshot.EMPTY;
             }
 
@ -1525,7 +1534,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                     maybeRecalculateMetadataHash(blobContainer, fileInfo, recoveryTargetMetadata);
                 } catch (Exception e) {
                     // if the index is broken we might not be able to read it
-                    logger.warn(() -> new ParameterizedMessage("{} Can't calculate hash from blog for file [{}] [{}]", shardId, fileInfo.physicalName(), fileInfo.metadata()), e);
+                    logger.warn(() -> new ParameterizedMessage("{} Can't calculate hash from blog for file [{}] [{}]",
+                        shardId, fileInfo.physicalName(), fileInfo.metadata()), e);
                 }
                 snapshotMetaData.put(fileInfo.metadata().name(), fileInfo.metadata());
                 fileInfos.put(fileInfo.metadata().name(), fileInfo);
@ -1543,7 +1553,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                     BlobStoreIndexShardSnapshot.FileInfo fileInfo = fileInfos.get(md.name());
                     recoveryState.getIndex().addFileDetail(fileInfo.name(), fileInfo.length(), true);
                     if (logger.isTraceEnabled()) {
-                        logger.trace("[{}] [{}] not_recovering [{}] from [{}], exists in local store and is same", shardId, snapshotId, fileInfo.physicalName(), fileInfo.name());
+                        logger.trace("[{}] [{}] not_recovering [{}] from [{}], exists in local store and is same",
+                            shardId, snapshotId, fileInfo.physicalName(), fileInfo.name());
                     }
                 }
 
@ -1634,7 +1645,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                 stream = new RateLimitingInputStream(partSliceStream, restoreRateLimiter, restoreRateLimitingTimeInNanos::inc);
             }
 
-            try (IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(), fileInfo.metadata(), IOContext.DEFAULT)) {
+            try (IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(),
+                fileInfo.metadata(), IOContext.DEFAULT)) {
                 final byte[] buffer = new byte[BUFFER_SIZE];
                 int length;
                 while ((length = stream.read(buffer)) > 0) {
@ -43,7 +43,8 @@ import java.util.function.Function;
  * <dl>
  * <dt>{@code location}</dt><dd>Path to the root of repository. This is mandatory parameter.</dd>
  * <dt>{@code concurrent_streams}</dt><dd>Number of concurrent read/write stream (per repository on each node). Defaults to 5.</dd>
- * <dt>{@code chunk_size}</dt><dd>Large file can be divided into chunks. This parameter specifies the chunk size. Defaults to not chucked.</dd>
+ * <dt>{@code chunk_size}</dt><dd>Large file can be divided into chunks. This parameter specifies the chunk size.
+ * Defaults to not chucked.</dd>
  * <dt>{@code compress}</dt><dd>If set to true metadata files will be stored compressed. Defaults to false.</dd>
  * </dl>
  */
@ -122,7 +122,8 @@ public class RestIndicesAction extends AbstractCatAction {
         client.admin().indices().stats(indicesStatsRequest, new RestResponseListener<IndicesStatsResponse>(channel) {
             @Override
             public RestResponse buildResponse(IndicesStatsResponse indicesStatsResponse) throws Exception {
-                Table tab = buildTable(request, concreteIndices, clusterHealthResponse, indicesStatsResponse, state.metaData());
+                Table tab = buildTable(request, concreteIndices, clusterHealthResponse,
+                    indicesStatsResponse, state.metaData());
                 return RestTable.buildResponse(tab, channel);
             }
         });
@ -168,28 +169,36 @@ public class RestIndicesAction extends AbstractCatAction {
         table.addCell("completion.size", "sibling:pri;alias:cs,completionSize;default:false;text-align:right;desc:size of completion");
         table.addCell("pri.completion.size", "default:false;text-align:right;desc:size of completion");
 
-        table.addCell("fielddata.memory_size", "sibling:pri;alias:fm,fielddataMemory;default:false;text-align:right;desc:used fielddata cache");
+        table.addCell("fielddata.memory_size",
+            "sibling:pri;alias:fm,fielddataMemory;default:false;text-align:right;desc:used fielddata cache");
         table.addCell("pri.fielddata.memory_size", "default:false;text-align:right;desc:used fielddata cache");
 
-        table.addCell("fielddata.evictions", "sibling:pri;alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions");
+        table.addCell("fielddata.evictions",
+            "sibling:pri;alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions");
         table.addCell("pri.fielddata.evictions", "default:false;text-align:right;desc:fielddata evictions");
 
-        table.addCell("query_cache.memory_size", "sibling:pri;alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
+        table.addCell("query_cache.memory_size",
+            "sibling:pri;alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
         table.addCell("pri.query_cache.memory_size", "default:false;text-align:right;desc:used query cache");
 
-        table.addCell("query_cache.evictions", "sibling:pri;alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
+        table.addCell("query_cache.evictions",
+            "sibling:pri;alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
         table.addCell("pri.query_cache.evictions", "default:false;text-align:right;desc:query cache evictions");
 
-        table.addCell("request_cache.memory_size", "sibling:pri;alias:rcm,requestCacheMemory;default:false;text-align:right;desc:used request cache");
+        table.addCell("request_cache.memory_size",
+            "sibling:pri;alias:rcm,requestCacheMemory;default:false;text-align:right;desc:used request cache");
         table.addCell("pri.request_cache.memory_size", "default:false;text-align:right;desc:used request cache");
 
-        table.addCell("request_cache.evictions", "sibling:pri;alias:rce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions");
+        table.addCell("request_cache.evictions",
+            "sibling:pri;alias:rce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions");
         table.addCell("pri.request_cache.evictions", "default:false;text-align:right;desc:request cache evictions");
 
-        table.addCell("request_cache.hit_count", "sibling:pri;alias:rchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit count");
+        table.addCell("request_cache.hit_count",
+            "sibling:pri;alias:rchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit count");
         table.addCell("pri.request_cache.hit_count", "default:false;text-align:right;desc:request cache hit count");
 
-        table.addCell("request_cache.miss_count", "sibling:pri;alias:rcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss count");
+        table.addCell("request_cache.miss_count",
+            "sibling:pri;alias:rcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss count");
         table.addCell("pri.request_cache.miss_count", "default:false;text-align:right;desc:request cache miss count");
 
         table.addCell("flush.total", "sibling:pri;alias:ft,flushTotal;default:false;text-align:right;desc:number of flushes");
|
         table.addCell("get.total", "sibling:pri;alias:gto,getTotal;default:false;text-align:right;desc:number of get ops");
         table.addCell("pri.get.total", "default:false;text-align:right;desc:number of get ops");
 
-        table.addCell("get.exists_time", "sibling:pri;alias:geti,getExistsTime;default:false;text-align:right;desc:time spent in successful gets");
+        table.addCell("get.exists_time",
+            "sibling:pri;alias:geti,getExistsTime;default:false;text-align:right;desc:time spent in successful gets");
         table.addCell("pri.get.exists_time", "default:false;text-align:right;desc:time spent in successful gets");
 
-        table.addCell("get.exists_total", "sibling:pri;alias:geto,getExistsTotal;default:false;text-align:right;desc:number of successful gets");
+        table.addCell("get.exists_total",
+            "sibling:pri;alias:geto,getExistsTotal;default:false;text-align:right;desc:number of successful gets");
         table.addCell("pri.get.exists_total", "default:false;text-align:right;desc:number of successful gets");
 
-        table.addCell("get.missing_time", "sibling:pri;alias:gmti,getMissingTime;default:false;text-align:right;desc:time spent in failed gets");
+        table.addCell("get.missing_time",
+            "sibling:pri;alias:gmti,getMissingTime;default:false;text-align:right;desc:time spent in failed gets");
         table.addCell("pri.get.missing_time", "default:false;text-align:right;desc:time spent in failed gets");
 
-        table.addCell("get.missing_total", "sibling:pri;alias:gmto,getMissingTotal;default:false;text-align:right;desc:number of failed gets");
+        table.addCell("get.missing_total",
+            "sibling:pri;alias:gmto,getMissingTotal;default:false;text-align:right;desc:number of failed gets");
         table.addCell("pri.get.missing_total", "default:false;text-align:right;desc:number of failed gets");
 
-        table.addCell("indexing.delete_current", "sibling:pri;alias:idc,indexingDeleteCurrent;default:false;text-align:right;desc:number of current deletions");
+        table.addCell("indexing.delete_current",
+            "sibling:pri;alias:idc,indexingDeleteCurrent;default:false;text-align:right;desc:number of current deletions");
         table.addCell("pri.indexing.delete_current", "default:false;text-align:right;desc:number of current deletions");
 
-        table.addCell("indexing.delete_time", "sibling:pri;alias:idti,indexingDeleteTime;default:false;text-align:right;desc:time spent in deletions");
+        table.addCell("indexing.delete_time",
+            "sibling:pri;alias:idti,indexingDeleteTime;default:false;text-align:right;desc:time spent in deletions");
         table.addCell("pri.indexing.delete_time", "default:false;text-align:right;desc:time spent in deletions");
 
-        table.addCell("indexing.delete_total", "sibling:pri;alias:idto,indexingDeleteTotal;default:false;text-align:right;desc:number of delete ops");
+        table.addCell("indexing.delete_total",
+            "sibling:pri;alias:idto,indexingDeleteTotal;default:false;text-align:right;desc:number of delete ops");
         table.addCell("pri.indexing.delete_total", "default:false;text-align:right;desc:number of delete ops");
 
-        table.addCell("indexing.index_current", "sibling:pri;alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops");
+        table.addCell("indexing.index_current",
+            "sibling:pri;alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops");
         table.addCell("pri.indexing.index_current", "default:false;text-align:right;desc:number of current indexing ops");
 
-        table.addCell("indexing.index_time", "sibling:pri;alias:iiti,indexingIndexTime;default:false;text-align:right;desc:time spent in indexing");
+        table.addCell("indexing.index_time",
+            "sibling:pri;alias:iiti,indexingIndexTime;default:false;text-align:right;desc:time spent in indexing");
         table.addCell("pri.indexing.index_time", "default:false;text-align:right;desc:time spent in indexing");
 
-        table.addCell("indexing.index_total", "sibling:pri;alias:iito,indexingIndexTotal;default:false;text-align:right;desc:number of indexing ops");
+        table.addCell("indexing.index_total",
+            "sibling:pri;alias:iito,indexingIndexTotal;default:false;text-align:right;desc:number of indexing ops");
         table.addCell("pri.indexing.index_total", "default:false;text-align:right;desc:number of indexing ops");
 
-        table.addCell("indexing.index_failed", "sibling:pri;alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops");
+        table.addCell("indexing.index_failed",
+            "sibling:pri;alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops");
         table.addCell("pri.indexing.index_failed", "default:false;text-align:right;desc:number of failed indexing ops");
 
-        table.addCell("merges.current", "sibling:pri;alias:mc,mergesCurrent;default:false;text-align:right;desc:number of current merges");
+        table.addCell("merges.current",
+            "sibling:pri;alias:mc,mergesCurrent;default:false;text-align:right;desc:number of current merges");
         table.addCell("pri.merges.current", "default:false;text-align:right;desc:number of current merges");
 
-        table.addCell("merges.current_docs", "sibling:pri;alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs");
+        table.addCell("merges.current_docs",
+            "sibling:pri;alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs");
         table.addCell("pri.merges.current_docs", "default:false;text-align:right;desc:number of current merging docs");
 
-        table.addCell("merges.current_size", "sibling:pri;alias:mcs,mergesCurrentSize;default:false;text-align:right;desc:size of current merges");
+        table.addCell("merges.current_size",
+            "sibling:pri;alias:mcs,mergesCurrentSize;default:false;text-align:right;desc:size of current merges");
         table.addCell("pri.merges.current_size", "default:false;text-align:right;desc:size of current merges");
 
-        table.addCell("merges.total", "sibling:pri;alias:mt,mergesTotal;default:false;text-align:right;desc:number of completed merge ops");
+        table.addCell("merges.total",
+            "sibling:pri;alias:mt,mergesTotal;default:false;text-align:right;desc:number of completed merge ops");
         table.addCell("pri.merges.total", "default:false;text-align:right;desc:number of completed merge ops");
 
         table.addCell("merges.total_docs", "sibling:pri;alias:mtd,mergesTotalDocs;default:false;text-align:right;desc:docs merged");
|
         table.addCell("merges.total_size", "sibling:pri;alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged");
         table.addCell("pri.merges.total_size", "default:false;text-align:right;desc:size merged");
 
-        table.addCell("merges.total_time", "sibling:pri;alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges");
+        table.addCell("merges.total_time",
+            "sibling:pri;alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges");
         table.addCell("pri.merges.total_time", "default:false;text-align:right;desc:time spent in merges");
 
         table.addCell("refresh.total", "sibling:pri;alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes");
|
         table.addCell("refresh.time", "sibling:pri;alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes");
         table.addCell("pri.refresh.time", "default:false;text-align:right;desc:time spent in refreshes");
 
-        table.addCell("refresh.listeners", "sibling:pri;alias:rli,refreshListeners;default:false;text-align:right;desc:number of pending refresh listeners");
+        table.addCell("refresh.listeners",
+            "sibling:pri;alias:rli,refreshListeners;default:false;text-align:right;desc:number of pending refresh listeners");
         table.addCell("pri.refresh.listeners", "default:false;text-align:right;desc:number of pending refresh listeners");
 
-        table.addCell("search.fetch_current", "sibling:pri;alias:sfc,searchFetchCurrent;default:false;text-align:right;desc:current fetch phase ops");
+        table.addCell("search.fetch_current",
+            "sibling:pri;alias:sfc,searchFetchCurrent;default:false;text-align:right;desc:current fetch phase ops");
         table.addCell("pri.search.fetch_current", "default:false;text-align:right;desc:current fetch phase ops");
 
-        table.addCell("search.fetch_time", "sibling:pri;alias:sfti,searchFetchTime;default:false;text-align:right;desc:time spent in fetch phase");
+        table.addCell("search.fetch_time",
+            "sibling:pri;alias:sfti,searchFetchTime;default:false;text-align:right;desc:time spent in fetch phase");
         table.addCell("pri.search.fetch_time", "default:false;text-align:right;desc:time spent in fetch phase");
 
-        table.addCell("search.fetch_total", "sibling:pri;alias:sfto,searchFetchTotal;default:false;text-align:right;desc:total fetch ops");
+        table.addCell("search.fetch_total",
+            "sibling:pri;alias:sfto,searchFetchTotal;default:false;text-align:right;desc:total fetch ops");
         table.addCell("pri.search.fetch_total", "default:false;text-align:right;desc:total fetch ops");
 
-        table.addCell("search.open_contexts", "sibling:pri;alias:so,searchOpenContexts;default:false;text-align:right;desc:open search contexts");
+        table.addCell("search.open_contexts",
+            "sibling:pri;alias:so,searchOpenContexts;default:false;text-align:right;desc:open search contexts");
         table.addCell("pri.search.open_contexts", "default:false;text-align:right;desc:open search contexts");
 
-        table.addCell("search.query_current", "sibling:pri;alias:sqc,searchQueryCurrent;default:false;text-align:right;desc:current query phase ops");
+        table.addCell("search.query_current",
+            "sibling:pri;alias:sqc,searchQueryCurrent;default:false;text-align:right;desc:current query phase ops");
         table.addCell("pri.search.query_current", "default:false;text-align:right;desc:current query phase ops");
 
-        table.addCell("search.query_time", "sibling:pri;alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase");
+        table.addCell("search.query_time",
+            "sibling:pri;alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase");
         table.addCell("pri.search.query_time", "default:false;text-align:right;desc:time spent in query phase");
 
-        table.addCell("search.query_total", "sibling:pri;alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops");
+        table.addCell("search.query_total",
+            "sibling:pri;alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops");
         table.addCell("pri.search.query_total", "default:false;text-align:right;desc:total query phase ops");
 
-        table.addCell("search.scroll_current", "sibling:pri;alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll contexts");
+        table.addCell("search.scroll_current",
+            "sibling:pri;alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll contexts");
         table.addCell("pri.search.scroll_current", "default:false;text-align:right;desc:open scroll contexts");
 
-        table.addCell("search.scroll_time", "sibling:pri;alias:scti,searchScrollTime;default:false;text-align:right;desc:time scroll contexts held open");
+        table.addCell("search.scroll_time",
+            "sibling:pri;alias:scti,searchScrollTime;default:false;text-align:right;desc:time scroll contexts held open");
         table.addCell("pri.search.scroll_time", "default:false;text-align:right;desc:time scroll contexts held open");
 
-        table.addCell("search.scroll_total", "sibling:pri;alias:scto,searchScrollTotal;default:false;text-align:right;desc:completed scroll contexts");
+        table.addCell("search.scroll_total",
+            "sibling:pri;alias:scto,searchScrollTotal;default:false;text-align:right;desc:completed scroll contexts");
         table.addCell("pri.search.scroll_total", "default:false;text-align:right;desc:completed scroll contexts");
 
         table.addCell("segments.count", "sibling:pri;alias:sc,segmentsCount;default:false;text-align:right;desc:number of segments");
|
         table.addCell("segments.memory", "sibling:pri;alias:sm,segmentsMemory;default:false;text-align:right;desc:memory used by segments");
         table.addCell("pri.segments.memory", "default:false;text-align:right;desc:memory used by segments");
 
-        table.addCell("segments.index_writer_memory", "sibling:pri;alias:siwm,segmentsIndexWriterMemory;default:false;text-align:right;desc:memory used by index writer");
+        table.addCell("segments.index_writer_memory",
+            "sibling:pri;alias:siwm,segmentsIndexWriterMemory;default:false;text-align:right;desc:memory used by index writer");
         table.addCell("pri.segments.index_writer_memory", "default:false;text-align:right;desc:memory used by index writer");
 
-        table.addCell("segments.version_map_memory", "sibling:pri;alias:svmm,segmentsVersionMapMemory;default:false;text-align:right;desc:memory used by version map");
+        table.addCell("segments.version_map_memory",
+            "sibling:pri;alias:svmm,segmentsVersionMapMemory;default:false;text-align:right;desc:memory used by version map");
         table.addCell("pri.segments.version_map_memory", "default:false;text-align:right;desc:memory used by version map");
 
-        table.addCell("segments.fixed_bitset_memory", "sibling:pri;alias:sfbm,fixedBitsetMemory;default:false;text-align:right;desc:memory used by fixed bit sets for nested object field types and type filters for types referred in _parent fields");
-        table.addCell("pri.segments.fixed_bitset_memory", "default:false;text-align:right;desc:memory used by fixed bit sets for nested object field types and type filters for types referred in _parent fields");
+        table.addCell("segments.fixed_bitset_memory",
+            "sibling:pri;alias:sfbm,fixedBitsetMemory;default:false;text-align:right;desc:memory used by fixed bit sets for" +
+            " nested object field types and type filters for types referred in _parent fields");
+        table.addCell("pri.segments.fixed_bitset_memory",
+            "default:false;text-align:right;desc:memory used by fixed bit sets for nested object" +
+            " field types and type filters for types referred in _parent fields");
 
         table.addCell("warmer.current", "sibling:pri;alias:wc,warmerCurrent;default:false;text-align:right;desc:current warmer ops");
         table.addCell("pri.warmer.current", "default:false;text-align:right;desc:current warmer ops");
|
         table.addCell("warmer.total", "sibling:pri;alias:wto,warmerTotal;default:false;text-align:right;desc:total warmer ops");
         table.addCell("pri.warmer.total", "default:false;text-align:right;desc:total warmer ops");
 
-        table.addCell("warmer.total_time", "sibling:pri;alias:wtt,warmerTotalTime;default:false;text-align:right;desc:time spent in warmers");
+        table.addCell("warmer.total_time",
+            "sibling:pri;alias:wtt,warmerTotalTime;default:false;text-align:right;desc:time spent in warmers");
         table.addCell("pri.warmer.total_time", "default:false;text-align:right;desc:time spent in warmers");
 
-        table.addCell("suggest.current", "sibling:pri;alias:suc,suggestCurrent;default:false;text-align:right;desc:number of current suggest ops");
+        table.addCell("suggest.current",
+            "sibling:pri;alias:suc,suggestCurrent;default:false;text-align:right;desc:number of current suggest ops");
         table.addCell("pri.suggest.current", "default:false;text-align:right;desc:number of current suggest ops");
 
         table.addCell("suggest.time", "sibling:pri;alias:suti,suggestTime;default:false;text-align:right;desc:time spend in suggest");
|
     }
 
     // package private for testing
-    Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse response, IndicesStatsResponse stats, MetaData indexMetaDatas) {
+    Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse response,
+                     IndicesStatsResponse stats, MetaData indexMetaDatas) {
         final String healthParam = request.param("health");
         final ClusterHealthStatus status;
         if (healthParam != null) {
|
             final CommonStats totalStats = indexStats == null ? new CommonStats() : indexStats.getTotal();
 
             table.startRow();
-            table.addCell(state == IndexMetaData.State.OPEN ? (indexHealth == null ? "red*" : indexHealth.getStatus().toString().toLowerCase(Locale.ROOT)) : null);
+            table.addCell(state == IndexMetaData.State.OPEN ?
+                (indexHealth == null ? "red*" : indexHealth.getStatus().toString().toLowerCase(Locale.ROOT)) : null);
             table.addCell(state.toString().toLowerCase(Locale.ROOT));
             table.addCell(indexName);
             table.addCell(index.getUUID());
@ -142,16 +142,20 @@ public class RestShardsAction extends AbstractCatAction {
         table.addCell("get.missing_time", "alias:gmti,getMissingTime;default:false;text-align:right;desc:time spent in failed gets");
         table.addCell("get.missing_total", "alias:gmto,getMissingTotal;default:false;text-align:right;desc:number of failed gets");
 
-        table.addCell("indexing.delete_current", "alias:idc,indexingDeleteCurrent;default:false;text-align:right;desc:number of current deletions");
+        table.addCell("indexing.delete_current",
+            "alias:idc,indexingDeleteCurrent;default:false;text-align:right;desc:number of current deletions");
         table.addCell("indexing.delete_time", "alias:idti,indexingDeleteTime;default:false;text-align:right;desc:time spent in deletions");
         table.addCell("indexing.delete_total", "alias:idto,indexingDeleteTotal;default:false;text-align:right;desc:number of delete ops");
-        table.addCell("indexing.index_current", "alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops");
+        table.addCell("indexing.index_current",
+            "alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops");
         table.addCell("indexing.index_time", "alias:iiti,indexingIndexTime;default:false;text-align:right;desc:time spent in indexing");
         table.addCell("indexing.index_total", "alias:iito,indexingIndexTotal;default:false;text-align:right;desc:number of indexing ops");
-        table.addCell("indexing.index_failed", "alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops");
+        table.addCell("indexing.index_failed",
+            "alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops");
 
         table.addCell("merges.current", "alias:mc,mergesCurrent;default:false;text-align:right;desc:number of current merges");
-        table.addCell("merges.current_docs", "alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs");
+        table.addCell("merges.current_docs",
+            "alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs");
         table.addCell("merges.current_size", "alias:mcs,mergesCurrentSize;default:false;text-align:right;desc:size of current merges");
         table.addCell("merges.total", "alias:mt,mergesTotal;default:false;text-align:right;desc:number of completed merge ops");
         table.addCell("merges.total_docs", "alias:mtd,mergesTotalDocs;default:false;text-align:right;desc:docs merged");
@ -160,7 +164,8 @@ public class RestShardsAction extends AbstractCatAction {
 
         table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes");
         table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes");
-        table.addCell("refresh.listeners", "alias:rli,refreshListeners;default:false;text-align:right;desc:number of pending refresh listeners");
+        table.addCell("refresh.listeners",
+            "alias:rli,refreshListeners;default:false;text-align:right;desc:number of pending refresh listeners");
 
         table.addCell("search.fetch_current", "alias:sfc,searchFetchCurrent;default:false;text-align:right;desc:current fetch phase ops");
         table.addCell("search.fetch_time", "alias:sfti,searchFetchTime;default:false;text-align:right;desc:time spent in fetch phase");
@@ -170,14 +175,19 @@ public class RestShardsAction extends AbstractCatAction {
         table.addCell("search.query_time", "alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase");
         table.addCell("search.query_total", "alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops");
         table.addCell("search.scroll_current", "alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll contexts");
-        table.addCell("search.scroll_time", "alias:scti,searchScrollTime;default:false;text-align:right;desc:time scroll contexts held open");
+        table.addCell("search.scroll_time",
+            "alias:scti,searchScrollTime;default:false;text-align:right;desc:time scroll contexts held open");
         table.addCell("search.scroll_total", "alias:scto,searchScrollTotal;default:false;text-align:right;desc:completed scroll contexts");

         table.addCell("segments.count", "alias:sc,segmentsCount;default:false;text-align:right;desc:number of segments");
         table.addCell("segments.memory", "alias:sm,segmentsMemory;default:false;text-align:right;desc:memory used by segments");
-        table.addCell("segments.index_writer_memory", "alias:siwm,segmentsIndexWriterMemory;default:false;text-align:right;desc:memory used by index writer");
-        table.addCell("segments.version_map_memory", "alias:svmm,segmentsVersionMapMemory;default:false;text-align:right;desc:memory used by version map");
-        table.addCell("segments.fixed_bitset_memory", "alias:sfbm,fixedBitsetMemory;default:false;text-align:right;desc:memory used by fixed bit sets for nested object field types and type filters for types referred in _parent fields");
+        table.addCell("segments.index_writer_memory",
+            "alias:siwm,segmentsIndexWriterMemory;default:false;text-align:right;desc:memory used by index writer");
+        table.addCell("segments.version_map_memory",
+            "alias:svmm,segmentsVersionMapMemory;default:false;text-align:right;desc:memory used by version map");
+        table.addCell("segments.fixed_bitset_memory",
+            "alias:sfbm,fixedBitsetMemory;default:false;text-align:right;desc:memory used by fixed bit sets for nested object" +
+                " field types and type filters for types referred in _parent fields");

         table.addCell("seq_no.max", "alias:sqm,maxSeqNo;default:false;text-align:right;desc:max sequence number");
         table.addCell("seq_no.local_checkpoint", "alias:sql,localCheckpoint;default:false;text-align:right;desc:local checkpoint");
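Note on the column definitions above: the second argument to table.addCell is a semicolon-separated list of key:value attributes (alias, default, text-align, desc). A minimal sketch of how such a string decodes, for illustration only; this is not the actual Elasticsearch Table implementation:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative decoder for the addCell() attribute strings above.
    final class CellAttrsSketch {
        static Map<String, String> parse(String attrs) {
            Map<String, String> out = new HashMap<>();
            for (String pair : attrs.split(";")) {
                int colon = pair.indexOf(':');
                out.put(pair.substring(0, colon), pair.substring(colon + 1));
            }
            return out;
        }

        public static void main(String[] args) {
            Map<String, String> attrs =
                parse("alias:sc,segmentsCount;default:false;text-align:right;desc:number of segments");
            System.out.println(attrs.get("alias"));   // sc,segmentsCount
            System.out.println(attrs.get("default")); // false
        }
    }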
@@ -88,7 +88,8 @@ public class RestThreadPoolAction extends AbstractCatAction {
         client.admin().cluster().nodesStats(nodesStatsRequest, new RestResponseListener<NodesStatsResponse>(channel) {
             @Override
             public RestResponse buildResponse(NodesStatsResponse nodesStatsResponse) throws Exception {
-                return RestTable.buildResponse(buildTable(request, clusterStateResponse, nodesInfoResponse, nodesStatsResponse), channel);
+                return RestTable.buildResponse(
+                    buildTable(request, clusterStateResponse, nodesInfoResponse, nodesStatsResponse), channel);
             }
         });
     }
@@ -166,7 +166,6 @@ public class QueryPhase implements SearchPhase {
                 }
                 // ... and stop collecting after ${size} matches
                 searchContext.terminateAfter(searchContext.size());
-                searchContext.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_DISABLED);
             } else if (canEarlyTerminate(reader, searchContext.sort())) {
                 // now this gets interesting: since the search sort is a prefix of the index sort, we can directly
                 // skip to the desired doc
@@ -177,7 +176,6 @@ public class QueryPhase implements SearchPhase {
                     .build();
                 query = bq;
             }
-            searchContext.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_DISABLED);
         }
     }
 }
@@ -217,8 +217,6 @@ abstract class TopDocsCollectorContext extends QueryCollectorContext {
             super(REASON_SEARCH_TOP_HITS, numHits);
             this.sortAndFormats = sortAndFormats;

-            // implicit total hit counts are valid only when there is no filter collector in the chain
-            final int hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query);
             final TopDocsCollector<?> topDocsCollector;
             if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
                 // don't compute hit counts via the collector
@@ -226,6 +224,8 @@ abstract class TopDocsCollectorContext extends QueryCollectorContext {
                 topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
                 totalHitsSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
             } else {
+                // implicit total hit counts are valid only when there is no filter collector in the chain
+                final int hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query);
                 if (hitCount == -1) {
                     topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, trackTotalHitsUpTo);
                     topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
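The two hunks above move the shortcut count into the branch that actually uses it, so shortcutTotalHitCount is no longer called when hit tracking is disabled. As a rough illustration of the kind of shortcut such a function can take, a hedged sketch only and not the real Elasticsearch implementation: a match-all query can be answered from reader statistics without collecting anything.

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.Query;

    // Hedged sketch of the idea behind shortcutTotalHitCount(): when every
    // live document matches, the reader already knows the count; -1 means
    // "unknown, fall back to counting via a collector".
    final class ShortcutCountSketch {
        static int shortcutTotalHitCount(IndexReader reader, Query query) {
            if (query instanceof MatchAllDocsQuery) {
                return reader.numDocs(); // answered from index statistics
            }
            return -1; // unknown: count via a collector instead
        }
    }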
@@ -293,12 +293,11 @@ abstract class TopDocsCollectorContext extends QueryCollectorContext {
         @Override
         void postProcess(QuerySearchResult result) throws IOException {
             final TopDocs topDocs = topDocsSupplier.get();
-            topDocs.totalHits = totalHitsSupplier.get();
-            float maxScore = maxScoreSupplier.get();
+            final float maxScore;
             if (scrollContext.totalHits == null) {
                 // first round
-                scrollContext.totalHits = topDocs.totalHits;
-                scrollContext.maxScore = maxScore;
+                topDocs.totalHits = scrollContext.totalHits = totalHitsSupplier.get();
+                maxScore = scrollContext.maxScore = maxScoreSupplier.get();
             } else {
                 // subsequent round: the total number of hits and
                 // the maximum score were computed on the first round
@@ -367,7 +366,7 @@ abstract class TopDocsCollectorContext extends QueryCollectorContext {
             // we can disable the tracking of total hits after the initial scroll query
             // since the total hits is preserved in the scroll context.
             int trackTotalHitsUpTo = searchContext.scrollContext().totalHits != null ?
-                SearchContext.TRACK_TOTAL_HITS_DISABLED : searchContext.trackTotalHitsUpTo();
+                SearchContext.TRACK_TOTAL_HITS_DISABLED : SearchContext.TRACK_TOTAL_HITS_ACCURATE;
             // no matter what the value of from is
             int numDocs = Math.min(searchContext.size(), totalNumDocs);
             return new ScrollingTopDocsCollectorContext(reader, query, searchContext.scrollContext(),
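Taken together, the scroll-related hunks above implement a compute-once pattern: the first scroll round always tracks total hits accurately, stores the result (and max score) on the scroll context, and every later round disables tracking and reuses the cached values. A minimal, self-contained sketch of that pattern, with hypothetical names (ScrollContext here is not the Elasticsearch class):

    import java.util.function.Supplier;

    // Compute-once-per-scroll sketch: pay the counting cost on the first
    // round, cache the result on the scroll context, reuse it afterwards.
    final class ScrollTotalsSketch {
        static final class ScrollContext {
            Long totalHits; // null until the first round has run
        }

        static long totalHits(ScrollContext scroll, Supplier<Long> accurateCount) {
            if (scroll.totalHits == null) {
                scroll.totalHits = accurateCount.get(); // first round: count accurately once
            }
            return scroll.totalHits; // later rounds: tracking disabled, cached value reused
        }

        public static void main(String[] args) {
            ScrollContext scroll = new ScrollContext();
            System.out.println(totalHits(scroll, () -> 42L)); // counts on the first call
            System.out.println(totalHits(scroll, () -> { throw new AssertionError("never recomputed"); }));
        }
    }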
@@ -257,7 +257,8 @@ public class ExplainActionIT extends ESIntegTestCase {

         refresh();

-        ExplainResponse explainResponse = client().prepareExplain("test", "type", "1").setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get();
+        ExplainResponse explainResponse = client().prepareExplain("test", "type", "1")
+            .setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get();
         assertThat(explainResponse.isExists(), equalTo(true));
         assertThat(explainResponse.isMatch(), equalTo(true));
     }
@@ -36,8 +36,10 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.get.GetResult;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.AbstractQueryTestCase;
 import org.elasticsearch.test.VersionUtils;
@@ -94,7 +96,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
         } else {
             indexedShapeToReturn = shape;
             indexedShapeId = randomAlphaOfLengthBetween(3, 20);
-            indexedShapeType = randomAlphaOfLengthBetween(3, 20);
+            indexedShapeType = randomBoolean() ? randomAlphaOfLengthBetween(3, 20) : null;
             builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId, indexedShapeType);
             if (randomBoolean()) {
                 indexedShapeIndex = randomAlphaOfLengthBetween(3, 20);
@@ -126,15 +128,17 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue

     @Override
     protected GetResponse executeGet(GetRequest getRequest) {
+        String indexedType = indexedShapeType != null ? indexedShapeType : MapperService.SINGLE_MAPPING_NAME;
+
         assertThat(indexedShapeToReturn, notNullValue());
         assertThat(indexedShapeId, notNullValue());
-        assertThat(indexedShapeType, notNullValue());
         assertThat(getRequest.id(), equalTo(indexedShapeId));
-        assertThat(getRequest.type(), equalTo(indexedShapeType));
+        assertThat(getRequest.type(), equalTo(indexedType));
         assertThat(getRequest.routing(), equalTo(indexedShapeRouting));
         String expectedShapeIndex = indexedShapeIndex == null ? GeoShapeQueryBuilder.DEFAULT_SHAPE_INDEX_NAME : indexedShapeIndex;
         assertThat(getRequest.index(), equalTo(expectedShapeIndex));
         String expectedShapePath = indexedShapePath == null ? GeoShapeQueryBuilder.DEFAULT_SHAPE_FIELD_NAME : indexedShapePath;

         String json;
         try {
             XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
@@ -146,7 +150,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
         } catch (IOException ex) {
             throw new ElasticsearchException("boom", ex);
         }
-        return new GetResponse(new GetResult(indexedShapeIndex, indexedShapeType, indexedShapeId, 0, 1, 0, true, new BytesArray(json),
+        return new GetResponse(new GetResult(indexedShapeIndex, indexedType, indexedShapeId, 0, 1, 0, true, new BytesArray(json),
             null));
     }

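The indexedType fallback above mirrors the typeless-API convention: use the type when the randomized test generated one, otherwise fall back to the single default mapping name. A trivial standalone sketch of the same null-coalescing choice; the constant value "_doc" is an assumption about MapperService.SINGLE_MAPPING_NAME, not something shown in this diff:

    // Standalone sketch of the fallback used in executeGet() above.
    final class TypeFallbackSketch {
        static final String SINGLE_MAPPING_NAME = "_doc"; // assumed default mapping name

        static String effectiveType(String indexedShapeType) {
            return indexedShapeType != null ? indexedShapeType : SINGLE_MAPPING_NAME;
        }

        public static void main(String[] args) {
            System.out.println(effectiveType(null));    // _doc
            System.out.println(effectiveType("shape")); // shape
        }
    }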
@@ -176,19 +180,13 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
     }

     public void testNoShape() throws IOException {
-        expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(fieldName(), null));
+        expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(fieldName(), (ShapeBuilder) null));
     }

     public void testNoIndexedShape() throws IOException {
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
             () -> new GeoShapeQueryBuilder(fieldName(), null, "type"));
-        assertEquals("either shapeBytes or indexedShapeId and indexedShapeType are required", e.getMessage());
-    }
-
-    public void testNoIndexedShapeType() throws IOException {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> new GeoShapeQueryBuilder(fieldName(), "id", null));
-        assertEquals("indexedShapeType is required if indexedShapeId is specified", e.getMessage());
+        assertEquals("either shape or indexedShapeId is required", e.getMessage());
     }

     public void testNoRelation() throws IOException {
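The new (ShapeBuilder) cast in testNoShape is needed because this diff suggests GeoShapeQueryBuilder now has both a (String, ShapeBuilder) and a (String, String) constructor (the id-only form appears below in LegacyGeoShapeFieldQueryTests), which makes a bare null second argument ambiguous. A minimal illustration of the same ambiguity with stand-in types:

    // Sketch of the overload ambiguity that forces the cast above.
    // StringBuilder/String stand in for ShapeBuilder/String; neither is a
    // subtype of the other, so a bare null matches both constructors.
    final class OverloadSketch {
        OverloadSketch(String field, StringBuilder shape) {}
        OverloadSketch(String field, String indexedShapeId) {}

        public static void main(String[] args) {
            // new OverloadSketch("f", null);              // does not compile: ambiguous reference
            new OverloadSketch("f", (StringBuilder) null); // compiles: overload chosen explicitly
        }
    }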
@@ -286,4 +284,16 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
         builder = rewriteAndFetch(builder, createShardContext());
         builder.writeTo(new BytesStreamOutput(10));
     }
+
+    @Override
+    protected QueryBuilder parseQuery(XContentParser parser) throws IOException {
+        QueryBuilder query = super.parseQuery(parser);
+        assertThat(query, instanceOf(GeoShapeQueryBuilder.class));
+
+        GeoShapeQueryBuilder shapeQuery = (GeoShapeQueryBuilder) query;
+        if (shapeQuery.indexedShapeType() != null) {
+            assertWarnings(GeoShapeQueryBuilder.TYPES_DEPRECATION_MESSAGE);
+        }
+        return query;
+    }
 }
@@ -25,6 +25,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermInSetQuery;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.AbstractQueryTestCase;
@@ -48,7 +49,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
                 type = randomAlphaOfLengthBetween(1, 10);
             }
         } else if (randomBoolean()) {
             type = MetaData.ALL;
         } else {
             type = null;
         }
@@ -152,4 +153,16 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
         assertThat(parsed.ids(), contains("1","100","4"));
         assertEquals(json, 0, parsed.types().length);
     }
+
+    @Override
+    protected QueryBuilder parseQuery(XContentParser parser) throws IOException {
+        QueryBuilder query = super.parseQuery(parser);
+        assertThat(query, instanceOf(IdsQueryBuilder.class));
+
+        IdsQueryBuilder idsQuery = (IdsQueryBuilder) query;
+        if (idsQuery.types().length > 0) {
+            assertWarnings(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE);
+        }
+        return query;
+    }
 }
@@ -59,8 +59,7 @@ public class LegacyGeoShapeFieldQueryTests extends GeoShapeQueryBuilderTests {
         } else {
             indexedShapeToReturn = shape;
             indexedShapeId = randomAlphaOfLengthBetween(3, 20);
-            indexedShapeType = randomAlphaOfLengthBetween(3, 20);
-            builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId, indexedShapeType);
+            builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId);
             if (randomBoolean()) {
                 indexedShapeIndex = randomAlphaOfLengthBetween(3, 20);
                 builder.indexedShapeIndex(indexedShapeIndex);
Some files were not shown because too many files have changed in this diff.